import os
import tempfile
from pathlib import Path
from typing import Dict, List

import streamlit as st

# Import the gap analysis pipeline - use our new implementation
from attached_assets.ifrs9_analysis import run_gap_analysis_pipeline, GapAnalysisState
from attached_assets.file_handler import extract_text_from_file
from attached_assets.consolidated_analysis import generate_consolidated_analysis, display_consolidated_analysis
from attached_assets.pdf_generator import generate_gap_analysis_pdf, generate_consolidated_pdf

# Configure page
st.set_page_config(
    page_title="IFRS9 Validation GAP Analysis",
    page_icon="📊",
    layout="wide"
)


def display_results(results: GapAnalysisState) -> None:
    """Display the GAP analysis results in a structured format.

    `results` is the output of run_gap_analysis_pipeline(), which returns a
    GapAnalysisState (or an equivalent dict when restored from session state).
    """
    st.header("📝 GAP Analysis Results")

    # Support both attribute-style (GapAnalysisState) and dict-style access
    queries = results.queries if hasattr(results, 'queries') else results.get('queries', [])
    gap_analyses = results.gap_analyses if hasattr(results, 'gap_analyses') else results.get('gap_analyses', {})

    if not queries:
        st.error("No queries generated. Please try running the analysis again.")
        return

    # Generate the consolidated analysis once and cache it in session state
    if 'consolidated_analysis' not in st.session_state:
        with st.spinner("Generating consolidated analysis..."):
            try:
                consolidated_analysis = generate_consolidated_analysis(queries, gap_analyses)
                st.session_state.consolidated_analysis = consolidated_analysis
            except Exception as e:
                st.error(f"Error generating consolidated analysis: {str(e)}")
                st.exception(e)

    # Create tabs for the two views
    tabs = st.tabs(["📊 Consolidated Analysis", "🔍 Individual Queries"])

    # Consolidated analysis tab
    with tabs[0]:
        if 'consolidated_analysis' in st.session_state:
            # Display the consolidated analysis with visualizations
            display_consolidated_analysis(st.session_state.consolidated_analysis)

            # PDF export for the consolidated report
            if st.button("📄 Export Consolidated Report as PDF"):
                try:
                    pdf_path = generate_consolidated_pdf(
                        st.session_state.consolidated_analysis, queries, gap_analyses
                    )
                    with open(pdf_path, "rb") as pdf_file:
                        pdf_bytes = pdf_file.read()
                    st.download_button(
                        label="⬇️ Download Consolidated PDF Report",
                        data=pdf_bytes,
                        file_name="consolidated_gap_analysis_report.pdf",
                        mime="application/pdf"
                    )
                    # Clean up the temporary file after offering the download
                    os.remove(pdf_path)
                except Exception as e:
                    st.error(f"Error generating consolidated PDF: {str(e)}")

    # Individual query analyses tab
    with tabs[1]:
        # PDF export for the individual query analyses
        if st.button("📄 Export Individual Queries as PDF"):
            try:
                pdf_path = generate_gap_analysis_pdf(queries, gap_analyses)
                with open(pdf_path, "rb") as pdf_file:
                    pdf_bytes = pdf_file.read()
                st.download_button(
                    label="⬇️ Download Individual Queries PDF",
                    data=pdf_bytes,
                    file_name="individual_queries_report.pdf",
                    mime="application/pdf"
                )
                # Clean up the temporary file after offering the download
                os.remove(pdf_path)
            except Exception as e:
                st.error(f"Error generating PDF: {str(e)}")

        # One sub-tab per generated query
        query_tabs = st.tabs([f"Query {i+1}" for i in range(len(queries))])

        # Fill each tab with the corresponding query and its analysis
        for tab, query in zip(query_tabs, queries):
            with tab:
                if query in gap_analyses:
                    response = gap_analyses[query]
                    st.markdown(f"### 🔹 {query}")
                    st.markdown(f"➡️ {response}")
                else:
                    st.warning("No analysis available for this query.")


def main():
    """Main application function."""
st.title("IFRS9 Validation GAP Analysis") st.markdown(""" This application allows you to upload IFRS9 validation reports and generates a comprehensive GAP analysis using advanced language models. """) # File uploader widget uploaded_file = st.file_uploader( "Upload your IFRS9 validation report", type=['txt', 'pdf', 'docx', 'doc'], help="Upload a text, PDF, or Word document containing your IFRS9 validation report." ) # Initialize variables run_analysis = False file_contents = "" if uploaded_file is not None: try: # Extract text from the uploaded file based on its type file_contents = extract_text_from_file(uploaded_file) # Display file summary st.subheader("Document Preview") st.markdown(f"**File:** {uploaded_file.name}") file_size = len(uploaded_file.getvalue()) / 1024 st.markdown(f"**Size:** {file_size:.2f} KB") st.markdown(f"**File Type:** {uploaded_file.name.split('.')[-1].upper()}") # Show preview of the extracted text with st.expander("Preview document content", expanded=False): preview_text = file_contents[:5000] + "..." if len(file_contents) > 5000 else file_contents st.text_area("Document content (preview)", preview_text, height=200) # Process button if st.button("Run GAP Analysis", type="primary"): run_analysis = True except Exception as e: st.error(f"❌ Error processing file: {str(e)}") st.exception(e) # Run the analysis when requested if run_analysis and file_contents: with st.spinner("Running GAP analysis... This may take several minutes depending on document length."): try: # Clear previous results to avoid duplication if 'results' in st.session_state: del st.session_state.results if 'consolidated_analysis' in st.session_state: del st.session_state.consolidated_analysis # Run the GAP analysis pipeline with the uploaded document results = run_gap_analysis_pipeline(file_contents) # Convert results to dict for session state storage st.session_state.results = { 'queries': results["queries"], 'gap_analyses': results["gap_analyses"] } except Exception as e: st.error(f"❌ Error during GAP analysis: {str(e)}") st.exception(e) # Display results if they exist in session state if 'results' in st.session_state: display_results(st.session_state.results) # Application info st.sidebar.header("About") st.sidebar.markdown(""" This application performs GAP analysis on IFRS9 validation reports using LangChain and LangGraph with GPT-4o. The analysis includes: - Generating critical GAP analysis questions - Retrieving relevant information - Filtering pertinent documents - Performing detailed GAP analysis """) # API key check (status only, not the actual key) api_key_status = "✅ Available" if os.environ.get("OPENAI_API_KEY") else "❌ Missing" st.sidebar.markdown(f"**OpenAI API Key:** {api_key_status}") if __name__ == "__main__": main()