# Extraction artifact (preserved as comments so the file parses):
# File size: 8,179 Bytes
# d9e303b f2542bb d9e303b f2542bb f72da4f f2542bb f72da4f f2542bb f72da4f f2542bb f72da4f f2542bb d9e303b f2542bb d9e303b f2542bb d9e303b f2542bb d9e303b f2542bb
import os
import streamlit as st
import tempfile
from pathlib import Path
from typing import Dict, List
# Import the gap analysis pipeline - use our new implementation
from attached_assets.ifrs9_analysis import run_gap_analysis_pipeline, GapAnalysisState
from attached_assets.file_handler import extract_text_from_file
from attached_assets.consolidated_analysis import generate_consolidated_analysis, display_consolidated_analysis
from attached_assets.pdf_generator import generate_gap_analysis_pdf, generate_consolidated_pdf
# Configure the Streamlit page; must execute before any other st.* call.
st.set_page_config(
    page_title="IFRS9 Validation GAP Analysis",
    page_icon="π",
    layout="wide",
)
# Parameter 'results' is the output of run_gap_analysis_pipeline() which returns GapAnalysisState
def display_results(results: GapAnalysisState) -> None:
    """Display the GAP analysis results in a structured format.

    Args:
        results: Either a GapAnalysisState (attribute access) or a plain
            dict (key access) restored from st.session_state. Both shapes
            are accepted because main() stores a dict copy in session state.
    """
    st.header("π GAP Analysis Results")

    # Accept both attribute-style (GapAnalysisState) and dict-style results.
    queries = results.queries if hasattr(results, 'queries') else results.get('queries', [])
    gap_analyses = results.gap_analyses if hasattr(results, 'gap_analyses') else results.get('gap_analyses', {})

    if not queries:
        st.error("No queries generated. Please try running the analysis again.")
        return

    # Generate the consolidated analysis once and cache it in session state
    # so reruns (e.g. button clicks) do not repeat the expensive call.
    if 'consolidated_analysis' not in st.session_state:
        with st.spinner("Generating consolidated analysis..."):
            try:
                consolidated_analysis = generate_consolidated_analysis(queries, gap_analyses)
                st.session_state.consolidated_analysis = consolidated_analysis
            except Exception as e:
                st.error(f"Error generating consolidated analysis: {str(e)}")
                st.exception(e)

    # Create tabs for different views
    tabs = st.tabs(["π Consolidated Analysis", "π Individual Queries"])

    # Tab 0: consolidated analysis with visualizations + PDF export.
    with tabs[0]:
        if 'consolidated_analysis' in st.session_state:
            display_consolidated_analysis(st.session_state.consolidated_analysis)
            if st.button("π Export Consolidated Report as PDF"):
                try:
                    pdf_path = generate_consolidated_pdf(st.session_state.consolidated_analysis, queries, gap_analyses)
                    with open(pdf_path, "rb") as pdf_file:
                        pdf_bytes = pdf_file.read()
                    st.download_button(
                        label="β¬οΈ Download Consolidated PDF Report",
                        data=pdf_bytes,
                        file_name="consolidated_gap_analysis_report.pdf",
                        mime="application/pdf"
                    )
                    # Clean up the temp file; bytes are already in memory.
                    os.remove(pdf_path)
                except Exception as e:
                    st.error(f"Error generating consolidated PDF: {str(e)}")

    # Tab 1: per-query analyses + PDF export.
    with tabs[1]:
        if st.button("π Export Individual Queries as PDF"):
            try:
                pdf_path = generate_gap_analysis_pdf(queries, gap_analyses)
                with open(pdf_path, "rb") as pdf_file:
                    pdf_bytes = pdf_file.read()
                st.download_button(
                    label="β¬οΈ Download Individual Queries PDF",
                    data=pdf_bytes,
                    file_name="individual_queries_report.pdf",
                    mime="application/pdf"
                )
                # Clean up the temp file; bytes are already in memory.
                os.remove(pdf_path)
            except Exception as e:
                st.error(f"Error generating PDF: {str(e)}")

        # One sub-tab per generated query.
        query_tabs = st.tabs([f"Query {i+1}" for i in range(len(queries))])
        for tab, query in zip(query_tabs, queries):
            with tab:
                if query in gap_analyses:
                    st.markdown(f"### πΉ {query}")
                    st.markdown(f"β‘οΈ {gap_analyses[query]}")
                else:
                    st.warning("No analysis available for this query.")
def main():
    """Main application function.

    Flow: upload a report file -> extract and preview its text -> run the
    GAP analysis pipeline on demand -> display results from session state.
    """
    st.title("IFRS9 Validation GAP Analysis")
    st.markdown("""
    This application allows you to upload IFRS9 validation reports and generates
    a comprehensive GAP analysis using advanced language models.
    """)

    # File uploader widget
    uploaded_file = st.file_uploader(
        "Upload your IFRS9 validation report",
        type=['txt', 'pdf', 'docx', 'doc'],
        help="Upload a text, PDF, or Word document containing your IFRS9 validation report."
    )

    run_analysis = False
    file_contents = ""

    if uploaded_file is not None:
        try:
            # Extract text from the uploaded file based on its type.
            file_contents = extract_text_from_file(uploaded_file)

            # Display a short file summary.
            st.subheader("Document Preview")
            st.markdown(f"**File:** {uploaded_file.name}")
            file_size = len(uploaded_file.getvalue()) / 1024  # size in KB
            st.markdown(f"**Size:** {file_size:.2f} KB")
            st.markdown(f"**File Type:** {uploaded_file.name.split('.')[-1].upper()}")

            # Show a truncated preview of the extracted text.
            with st.expander("Preview document content", expanded=False):
                preview_text = (file_contents[:5000] + "...") if len(file_contents) > 5000 else file_contents
                st.text_area("Document content (preview)", preview_text, height=200)

            if st.button("Run GAP Analysis", type="primary"):
                run_analysis = True
        except Exception as e:
            st.error(f"β Error processing file: {str(e)}")
            st.exception(e)

    # Run the analysis only when requested and text extraction succeeded.
    if run_analysis and file_contents:
        with st.spinner("Running GAP analysis... This may take several minutes depending on document length."):
            try:
                # Clear previous results to avoid duplication.
                if 'results' in st.session_state:
                    del st.session_state.results
                if 'consolidated_analysis' in st.session_state:
                    del st.session_state.consolidated_analysis

                results = run_gap_analysis_pipeline(file_contents)
                # Store a plain dict in session state so display_results()
                # can read it on subsequent Streamlit reruns.
                st.session_state.results = {
                    'queries': results["queries"],
                    'gap_analyses': results["gap_analyses"]
                }
            except Exception as e:
                st.error(f"β Error during GAP analysis: {str(e)}")
                st.exception(e)

    # Display results if they exist in session state.
    if 'results' in st.session_state:
        display_results(st.session_state.results)

    # Sidebar: application info.
    st.sidebar.header("About")
    st.sidebar.markdown("""
    This application performs GAP analysis on IFRS9 validation reports using
    LangChain and LangGraph with GPT-4o.

    The analysis includes:
    - Generating critical GAP analysis questions
    - Retrieving relevant information
    - Filtering pertinent documents
    - Performing detailed GAP analysis
    """)

    # API key check (status only, never the key itself). The original
    # literal was split across two lines by a formatting mangle and has
    # been rejoined here — confirm the intended status glyphs.
    api_key_status = "β Available" if os.environ.get("OPENAI_API_KEY") else "β Missing"
    st.sidebar.markdown(f"**OpenAI API Key:** {api_key_status}")


if __name__ == "__main__":
    main()
|