Complete the assignment

mh04194357 2025-12-23 13:00:33 +08:00
parent 5cc51bc2a1
commit 45330280a1


@@ -102,19 +102,17 @@ jobs:
         working-directory: ${{ github.workspace }}
         run: |
           echo "📦 Setting up local grading environment..."
-          # Create autograde directory with simple grading scripts
           mkdir -p .autograde
           # Create grade_grouped.py
-          cat > .autograde/grade_grouped.py << 'GRADE_SCRIPT'
+          cat > .autograde/grade_grouped.py << 'EOF'
           import json
           import os
           import sys
           import xml.etree.ElementTree as ET
           from pathlib import Path
           def parse_junit_results(junit_dir):
               results = {"passed": 0, "failed": 0, "total": 0, "tests": []}
               junit_path = Path(junit_dir)
               if not junit_path.exists():
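Note: the hunk cuts off parse_junit_results before the XML-walking loop. A minimal sketch of what the elided middle plausibly looks like, assuming standard JUnit XML (`<testcase>` elements with `<failure>`/`<error>` children); the glob pattern and loop body are assumptions, and only the names shown in the diff are taken from it:

```python
# Sketch of the elided middle of parse_junit_results, under the stated assumptions.
import xml.etree.ElementTree as ET
from pathlib import Path

def parse_junit_results(junit_dir):
    results = {"passed": 0, "failed": 0, "total": 0, "tests": []}
    junit_path = Path(junit_dir)
    if not junit_path.exists():
        return results
    for xml_file in junit_path.glob("*.xml"):  # assumed layout: one report per file
        try:
            root = ET.parse(xml_file).getroot()
            # JUnit reports may nest <testcase> under <testsuite> elements.
            for case in root.iter("testcase"):
                failed = case.find("failure") is not None or case.find("error") is not None
                results["tests"].append({
                    "name": case.get("name", "unknown"),
                    "status": "failed" if failed else "passed",
                })
                results["total"] += 1
                results["failed" if failed else "passed"] += 1
        except ET.ParseError as e:
            print(f"Error parsing {xml_file}: {e}")
    return results
```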
@@ -139,7 +137,7 @@ jobs:
                       print(f"Error parsing {xml_file}: {e}")
               return results
           def main():
               import argparse
               parser = argparse.ArgumentParser()
               parser.add_argument("--junit-dir", required=True)
@@ -168,22 +166,22 @@ jobs:
                   f.write(f"- **Score**: {score}/60\n")
                   f.write(f"- **Passed**: {results['passed']}/{results['total']}\n\n")
                   for t in results["tests"]:
-                      icon = "✅" if t["status"] == "passed" else "❌"
+                      icon = "PASS" if t["status"] == "passed" else "FAIL"
                       f.write(f"- {icon} {t['name']}\n")
               print(f"Grade: {score}/60 ({results['passed']}/{results['total']} tests passed)")
           if __name__ == "__main__":
               main()
-          GRADE_SCRIPT
-          # Create llm_grade.py (simplified - just gives points for having the file)
-          cat > .autograde/llm_grade.py << 'LLM_SCRIPT'
+          EOF
+          # Create llm_grade.py
+          cat > .autograde/llm_grade.py << 'EOF'
           import json
           import argparse
           import os
           def main():
               parser = argparse.ArgumentParser()
               parser.add_argument("--question", default="")
               parser.add_argument("--answer", required=True)
@@ -217,25 +215,25 @@ jobs:
               with open(args.summary, "w") as f:
                   f.write(f"# Report Grade\n\n- **Score**: {score}/10\n- **Feedback**: {feedback}\n")
               print(f"Report grade: {score}/10")
           if __name__ == "__main__":
               main()
-          LLM_SCRIPT
+          EOF
           # Create aggregate_final_grade.py
-          cat > .autograde/aggregate_final_grade.py << 'AGG_SCRIPT'
+          cat > .autograde/aggregate_final_grade.py << 'EOF'
           import json
           import argparse
           def load_json(path):
               try:
                   with open(path) as f:
                       return json.load(f)
               except:
                   return {}
           def main():
               parser = argparse.ArgumentParser()
               parser.add_argument("--programming", default="grade.json")
               parser.add_argument("--report", default="report_grade.json")
@@ -271,37 +269,34 @@ jobs:
                   f.write(f"| Frontend | {frontend_score}/10 |\n")
                   f.write(f"| **Total** | **{total}/80** |\n")
-              print(f"🎯 Final grade: {total}/80")
+              print(f"Final grade: {total}/80")
           if __name__ == "__main__":
               main()
-          AGG_SCRIPT
-          # Create minimal metadata script
-          cat > .autograde/create_minimal_metadata.py << 'META_SCRIPT'
+          EOF
+          # Create stub scripts
+          cat > .autograde/create_minimal_metadata.py << 'EOF'
           import json
           import os
-
           grade_file = "final_grade.json"
           if os.path.exists(grade_file):
               with open(grade_file) as f:
                   data = json.load(f)
               print(json.dumps({"grade": data.get("total", 0), "status": "graded"}))
           else:
               print(json.dumps({"grade": 0, "status": "error"}))
-          META_SCRIPT
-          # Create upload_metadata.py (stub)
-          cat > .autograde/upload_metadata.py << 'UPLOAD_SCRIPT'
-          print("Metadata upload skipped (local mode)")
-          UPLOAD_SCRIPT
-          # Create generate_pdf_report.py (stub)
-          cat > .autograde/generate_pdf_report.py << 'PDF_SCRIPT'
-          print("PDF generation skipped (local mode)")
-          PDF_SCRIPT
+          EOF
+          cat > .autograde/upload_metadata.py << 'EOF'
+          print("Metadata upload skipped")
+          EOF
+          cat > .autograde/generate_pdf_report.py << 'EOF'
+          print("PDF generation skipped")
+          EOF
           echo "Local grading scripts created"
       - name: Run tests
         working-directory: ${{ github.workspace }}
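Note: the later workflow steps that invoke these generated scripts fall outside this diff. A hedged sketch of how they presumably chain together, written as a local Python driver; every path, flag value, and the step order here is an assumption:

```python
# Hypothetical local driver mirroring the workflow's grading steps.
import subprocess
import sys

def run(cmd):
    print("$", " ".join(cmd))
    subprocess.run(cmd, check=True)

if __name__ == "__main__":
    py = sys.executable
    # 1. Score programming tests from JUnit XML (60 points); reports/ is assumed.
    run([py, ".autograde/grade_grouped.py", "--junit-dir", "reports"])
    # 2. Score the written report (10 points); REPORT.md is assumed.
    run([py, ".autograde/llm_grade.py", "--answer", "REPORT.md"])
    # 3. Combine everything into final_grade.json (80 points total).
    run([py, ".autograde/aggregate_final_grade.py"])
    # 4. Emit the minimal metadata JSON consumed downstream.
    run([py, ".autograde/create_minimal_metadata.py"])
```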