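"""Streamlit front end for the IFRS9 validation GAP analysis pipeline.

Wires together document upload, the GAP analysis pipeline, and PDF export;
the heavy lifting lives in the ``attached_assets`` helper modules.
"""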

import os
from typing import Dict, Union

import streamlit as st

from attached_assets.ifrs9_analysis import run_gap_analysis_pipeline, GapAnalysisState
from attached_assets.file_handler import extract_text_from_file
from attached_assets.consolidated_analysis import generate_consolidated_analysis, display_consolidated_analysis
from attached_assets.pdf_generator import generate_gap_analysis_pdf, generate_consolidated_pdf

st.set_page_config(
    page_title="IFRS9 Validation GAP Analysis",
    page_icon="📊",
    layout="wide"
)


def display_results(results: Union[GapAnalysisState, Dict]) -> None:
    """Display the GAP analysis results in a structured format.

    Accepts either a ``GapAnalysisState`` instance or the plain dict stored
    in ``st.session_state.results``.
    """
    st.header("📊 GAP Analysis Results")

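    # Results may be a GapAnalysisState object or a plain dict, so fall back
    # to dict-style access when attribute access is unavailable.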
    queries = results.queries if hasattr(results, 'queries') else results.get('queries', [])
    gap_analyses = results.gap_analyses if hasattr(results, 'gap_analyses') else results.get('gap_analyses', {})

    if not queries:
        st.error("No queries generated. Please try running the analysis again.")
        return

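    # Generate the consolidated analysis once per session and cache it in
    # st.session_state so Streamlit reruns do not repeat the call.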
    if 'consolidated_analysis' not in st.session_state:
        with st.spinner("Generating consolidated analysis..."):
            try:
                consolidated_analysis = generate_consolidated_analysis(queries, gap_analyses)
                st.session_state.consolidated_analysis = consolidated_analysis
            except Exception as e:
                st.error(f"Error generating consolidated analysis: {str(e)}")
                st.exception(e)

    tabs = st.tabs(["📊 Consolidated Analysis", "🔍 Individual Queries"])

    with tabs[0]:
        if 'consolidated_analysis' in st.session_state:
            display_consolidated_analysis(st.session_state.consolidated_analysis)

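            # Build the PDF on demand, stream it through a download button,
            # then remove the temporary file from disk.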
            if st.button("📄 Export Consolidated Report as PDF"):
                try:
                    pdf_path = generate_consolidated_pdf(st.session_state.consolidated_analysis, queries, gap_analyses)
                    with open(pdf_path, "rb") as pdf_file:
                        pdf_bytes = pdf_file.read()
                    st.download_button(
                        label="⬇️ Download Consolidated PDF Report",
                        data=pdf_bytes,
                        file_name="consolidated_gap_analysis_report.pdf",
                        mime="application/pdf"
                    )
                    os.remove(pdf_path)
                except Exception as e:
                    st.error(f"Error generating consolidated PDF: {str(e)}")

    with tabs[1]:
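        # Same export flow as the consolidated tab, but for the per-query report.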
        if st.button("📄 Export Individual Queries as PDF"):
            try:
                pdf_path = generate_gap_analysis_pdf(queries, gap_analyses)
                with open(pdf_path, "rb") as pdf_file:
                    pdf_bytes = pdf_file.read()
                st.download_button(
                    label="⬇️ Download Individual Queries PDF",
                    data=pdf_bytes,
                    file_name="individual_queries_report.pdf",
                    mime="application/pdf"
                )
                os.remove(pdf_path)
            except Exception as e:
                st.error(f"Error generating PDF: {str(e)}")

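        # One sub-tab per generated query, pairing each query with its analysis.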
        query_tabs = st.tabs([f"Query {i+1}" for i in range(len(queries))])

        for tab, query in zip(query_tabs, queries):
            with tab:
                if query in gap_analyses:
                    response = gap_analyses[query]
                    st.markdown(f"### 🔹 {query}")
                    st.markdown(f"➡️ {response}")
                else:
                    st.warning("No analysis available for this query.")


def main():
    """Main application function."""
    st.title("IFRS9 Validation GAP Analysis")
    st.markdown("""
    This application lets you upload an IFRS9 validation report and generates
    a comprehensive GAP analysis using advanced language models.
    """)

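    # Accept a single validation report; supported formats are text, PDF, and Word.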
    uploaded_file = st.file_uploader(
        "Upload your IFRS9 validation report",
        type=['txt', 'pdf', 'docx', 'doc'],
        help="Upload a text, PDF, or Word document containing your IFRS9 validation report."
    )

    run_analysis = False
    file_contents = ""

    if uploaded_file is not None:
        try:
            file_contents = extract_text_from_file(uploaded_file)

            st.subheader("Document Preview")
            st.markdown(f"**File:** {uploaded_file.name}")
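            # getvalue() returns the raw bytes of the upload, so this is the size in KB.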
            file_size = len(uploaded_file.getvalue()) / 1024
            st.markdown(f"**Size:** {file_size:.2f} KB")
            st.markdown(f"**File Type:** {uploaded_file.name.split('.')[-1].upper()}")

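            # Cap the inline preview at 5,000 characters so large documents stay responsive.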
            with st.expander("Preview document content", expanded=False):
                preview_text = (file_contents[:5000] + "...") if len(file_contents) > 5000 else file_contents
                st.text_area("Document content (preview)", preview_text, height=200)

            if st.button("Run GAP Analysis", type="primary"):
                run_analysis = True

        except Exception as e:
            st.error(f"❌ Error processing file: {str(e)}")
            st.exception(e)

    if run_analysis and file_contents:
        with st.spinner("Running GAP analysis... This may take several minutes depending on document length."):
            try:
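                # Clear any previous results so a new run starts from a clean slate.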
                if 'results' in st.session_state:
                    del st.session_state.results
                if 'consolidated_analysis' in st.session_state:
                    del st.session_state.consolidated_analysis

                results = run_gap_analysis_pipeline(file_contents)

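                # Persist only the pieces the UI needs; display_results accepts this plain dict.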
                st.session_state.results = {
                    'queries': results["queries"],
                    'gap_analyses': results["gap_analyses"]
                }

            except Exception as e:
                st.error(f"❌ Error during GAP analysis: {str(e)}")
                st.exception(e)

    if 'results' in st.session_state:
        display_results(st.session_state.results)

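    # Sidebar: a short description of the pipeline plus the API-key status.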
    st.sidebar.header("About")
    st.sidebar.markdown("""
    This application performs GAP analysis on IFRS9 validation reports using
    LangChain and LangGraph with GPT-4o.

    The analysis includes:
    - Generating critical GAP analysis questions
    - Retrieving relevant information
    - Filtering pertinent documents
    - Performing detailed GAP analysis
    """)

    api_key_status = "✅ Available" if os.environ.get("OPENAI_API_KEY") else "❌ Missing"
    st.sidebar.markdown(f"**OpenAI API Key:** {api_key_status}")


if __name__ == "__main__":
    main()