import os
import time
import tempfile
import warnings
import threading
from collections import defaultdict
from datetime import datetime, timedelta
from pathlib import Path

import gradio as gr
import markdown
import PyPDF2

warnings.filterwarnings("ignore")


# Rate limiting
class RateLimiter:
    def __init__(self, max_requests=5, time_window=60):
        self.max_requests = max_requests
        self.time_window = time_window
        self.requests = defaultdict(list)
        self.lock = threading.Lock()

    def is_allowed(self, user_id):
        with self.lock:
            now = datetime.now()
            # Clean old requests that have fallen outside the sliding window
            self.requests[user_id] = [
                req_time
                for req_time in self.requests[user_id]
                if now - req_time < timedelta(seconds=self.time_window)
            ]
            if len(self.requests[user_id]) >= self.max_requests:
                return False
            self.requests[user_id].append(now)
            return True


# Global rate limiter
rate_limiter = RateLimiter(max_requests=3, time_window=300)  # 3 requests per 5 minutes


def extract_text_from_pdf(pdf_file):
    """Extract text from uploaded PDF file."""
    try:
        reader = PyPDF2.PdfReader(pdf_file)
        text = ""
        for page in reader.pages:
            text += page.extract_text() + "\n"
        return text.strip()
    except Exception as e:
        return f"Error reading PDF: {str(e)}"


def setup_crewai():
    """Initialize CrewAI components."""
    try:
        from crewai import Agent, Task, Crew
        from crewai_tools import ScrapeWebsiteTool, SerperDevTool
        from langchain_openai import ChatOpenAI

        # Initialize tools
        search_tool = SerperDevTool()
        scrape_tool = ScrapeWebsiteTool()

        # Initialize LLM
        llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.3)

        # Create agents
        researcher = Agent(
            role="Job Requirements Analyst",
            goal="Extract and analyze key job requirements efficiently",
            tools=[scrape_tool, search_tool],
            verbose=False,
            backstory="Expert at quickly identifying essential job requirements and qualifications from job postings.",
            llm=llm,
        )

        resume_strategist = Agent(
            role="Resume Enhancement Specialist",
            goal="Optimize resumes to match job requirements effectively",
            tools=[],
            verbose=False,
            backstory="Skilled at tailoring resumes to highlight relevant experience and skills for specific job applications.",
            llm=llm,
        )

        return researcher, resume_strategist, llm
    except ImportError:
        raise Exception("CrewAI not installed. Please install required packages.")


def create_tasks(researcher, resume_strategist, job_url, resume_text):
    """Create optimized tasks for the crew."""
    from crewai import Task

    # Research task - focused and efficient
    research_task = Task(
        description=f"""
        Analyze the job posting at {job_url} and extract the top 10 most important:
        1. Required skills and technologies
        2. Key qualifications and experience levels
        3. Preferred background and certifications

        Focus on the most critical requirements only.
        """,
        expected_output="A concise list of the top 10 most important job requirements.",
        agent=researcher,
    )

    # Resume optimization task
    resume_task = Task(
        description=f"""
        Using the job requirements from the research task, optimize this resume:

        {resume_text}

        Instructions:
        1. Rewrite the professional summary to align with the job
        2. Highlight relevant experience and skills
        3. Adjust technical skills section to match requirements
        4. Ensure ATS-friendly formatting
        5. Keep the same factual information but present it strategically

        Return the complete optimized resume in markdown format.
""", expected_output="A complete, optimized resume in markdown format tailored to the job requirements.", agent=resume_strategist, context=[research_task] ) return research_task, resume_task def process_application(pdf_file, job_url, user_session): """Main processing function with rate limiting.""" # Rate limiting check if not rate_limiter.is_allowed(user_session): return "⚠️ Rate limit exceeded. Please wait 5 minutes before submitting another request.", "" if not pdf_file or not job_url: return "❌ Please provide both a PDF resume and job URL.", "" try: # Extract text from PDF with gr.Progress() as progress: progress(0.1, desc="Extracting text from PDF...") resume_text = extract_text_from_pdf(pdf_file) if "Error reading PDF" in resume_text: return f"❌ {resume_text}", "" progress(0.3, desc="Setting up AI agents...") researcher, resume_strategist, llm = setup_crewai() progress(0.5, desc="Creating optimization tasks...") research_task, resume_task = create_tasks(researcher, resume_strategist, job_url, resume_text) progress(0.7, desc="Analyzing job requirements...") # Execute tasks from crewai import Crew crew = Crew( agents=[researcher, resume_strategist], tasks=[research_task, resume_task], verbose=False ) progress(0.9, desc="Generating tailored resume...") result = crew.kickoff() progress(1.0, desc="Complete!") # Convert markdown to HTML for better display html_result = markdown.markdown(str(result)) return "✅ Resume successfully tailored!", html_result except Exception as e: return f"❌ Error processing your request: {str(e)}", "" def create_interface(): """Create the Gradio interface.""" with gr.Blocks( title="CV Tailor - AI Resume Optimizer", theme=gr.themes.Soft(), css=""" .gradio-container { max-width: 1200px; margin: auto; } .header { text-align: center; margin-bottom: 30px; } .rate-limit-info { background-color: #f0f8ff; padding: 10px; border-radius: 5px; margin-bottom: 20px; } """ ) as app: gr.HTML("""
            <div class="header">
                <h1>CV Tailor - AI Resume Optimizer</h1>
                <p>Upload your PDF resume and job URL to get an AI-tailored resume that matches the job requirements!</p>
            </div>
        """)
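
        # The original input/output widgets and event wiring are missing from
        # this file. The block below is a minimal sketch of that section,
        # reconstructed from process_application's signature and the CSS
        # classes defined above; component names, labels, and layout are
        # assumptions, not the original code.
        gr.HTML(
            '<div class="rate-limit-info">⏱️ Limited to 3 requests per 5 minutes per session.</div>'
        )

        with gr.Row():
            pdf_input = gr.File(label="Resume (PDF)", file_types=[".pdf"])
            job_url_input = gr.Textbox(label="Job Posting URL", placeholder="https://...")

        submit_btn = gr.Button("Tailor My Resume", variant="primary")
        status_output = gr.Markdown()
        resume_output = gr.HTML()

        # Per-session identifier for the rate limiter, generated on page load.
        session_id = gr.State()
        app.load(fn=lambda: os.urandom(8).hex(), inputs=None, outputs=session_id)

        submit_btn.click(
            fn=process_application,
            inputs=[pdf_input, job_url_input, session_id],
            outputs=[status_output, resume_output],
        )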
Powered by CrewAI & OpenAI GPT-4o Mini | CrewAI | Built with ❤️ using Gradio