import os

import gradio as gr
from langchain.chains.summarize import load_summarize_chain
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.llms import HuggingFaceHub
# Set your Hugging Face token securely (e.g. as a Space secret); left blank here.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = ""

# Create the LLM: a summarization-capable model hosted on the Hugging Face Hub.
llm = HuggingFaceHub(
    repo_id="facebook/bart-large-cnn",
    model_kwargs={"temperature": 0.7, "max_length": 512},
)
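# Optional alternative (a sketch, not used below): HuggingFaceHub is deprecated
# in newer LangChain releases in favour of HuggingFaceEndpoint from the
# langchain-huggingface package. Assuming that package is installed, an
# equivalent setup would look roughly like this:
#
#   from langchain_huggingface import HuggingFaceEndpoint
#   llm = HuggingFaceEndpoint(
#       repo_id="facebook/bart-large-cnn",
#       temperature=0.7,
#       max_new_tokens=512,
#   )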
# TEXT_MODEL_NAME = "google/gemma-3-270m"

# Example: load a paper from arXiv at startup (not used by the Gradio app below).
loader = PyPDFLoader("http://arxiv.org/pdf/2508.13246v1")
documents = loader.load()

# llm = OpenAI(temperature=0)
def summarize_pdf(pdf_file_path, custom_prompt=""):
    """Load a PDF, split it into chunks, and return a map-reduce summary."""
    # custom_prompt is accepted for future use but not wired into the chain yet.
    loader = PyPDFLoader(pdf_file_path)
    docs = loader.load_and_split()
    chain = load_summarize_chain(llm, chain_type="map_reduce")
    summary = chain.run(docs)
    return summary
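# Sketch of how custom_prompt could be wired in: a hypothetical helper (not
# called by the UI below), assuming the legacy load_summarize_chain
# map_prompt/combine_prompt keyword arguments. The template must expose a
# "{text}" variable, which the chain fills with the document chunks.
from langchain.prompts import PromptTemplate


def summarize_pdf_with_prompt(pdf_file_path, custom_prompt):
    docs = PyPDFLoader(pdf_file_path).load_and_split()
    prompt = PromptTemplate(
        input_variables=["text"],
        template=custom_prompt + "\n\n{text}\n\nCONCISE SUMMARY:",
    )
    chain = load_summarize_chain(
        llm, chain_type="map_reduce", map_prompt=prompt, combine_prompt=prompt
    )
    return chain.run(docs)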
input_pdf_path = gr.Textbox(label="Enter the PDF file path")
output_summary = gr.Textbox(label="Summary")
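# Alternative input (a sketch, not wired into the interface below): a file
# upload widget is often friendlier than typing a path. With type="filepath",
# Gradio passes a local temp path that PyPDFLoader can read directly:
#
#   input_pdf_path = gr.File(label="Upload a PDF", file_types=[".pdf"], type="filepath")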
interface = gr.Interface(
    fn=summarize_pdf,
    inputs=input_pdf_path,
    outputs=output_summary,
    title="PDF Summarizer",
    description="This app allows you to summarize your PDF files.",
)
if __name__ == "__main__":
    interface.launch(share=True)