{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os \n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "import getpass\n",
    "from langchain_groq import ChatGroq\n",
    "# Prompt for the API key interactively so the secret is never stored in the notebook.\n",
    "os.environ[\"GROQ_API_KEY\"] = getpass.getpass()\n",
    "# Groq-hosted Llama 3 8B; used by the summarize chain below.\n",
    "llm_groq = ChatGroq(model=\"llama3-8b-8192\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[Document(metadata={'source': 'https://www.youtube.com/watch?v=Ilg3gGewQ5U&ab_channel=3Blue1Brown'}, page_content=\" Here, we tackle backpropagation, the core algorithm behind how neural networks learn. After a quick recap for where we are, the first thing I'll do is an intuitive walkthrough  for what the algorithm is actually doing, without any reference to the formulas. Then, for those of you who do want to dive into the math,  the next video goes into the calculus underlying all this. If you watched the last two videos, or if you're just jumping in with the appropriate  background, you know what a neural network is, and how it feeds forward information. Here, we're doing the classic example of recognizing handwritten digits whose pixel  values get fed into the first layer of the network with 784 neurons,  and I've been showing a network with two hidden layers having just 16 neurons each,  and an output layer of 10 neurons, indicating which digit the network is choosing  as its answer. I'm also expecting you to understand gradient descent,  as described in the last video, and how what we mean by l\")]\n"
     ]
    }
   ],
   "source": [
    "from youtube_transcript_api import YouTubeTranscriptApi\n",
    "from langchain.docstore.document import Document\n",
    "\n",
    "def get_text_from_youtube_link(video_link, max_video_length=1000):\n",
    "    \"\"\"Fetch a YouTube video's transcript and wrap it in a LangChain Document.\n",
    "\n",
    "    Args:\n",
    "        video_link: Full YouTube watch URL (must contain 'watch?v=').\n",
    "        max_video_length: Maximum number of transcript characters to keep,\n",
    "            so the downstream prompt stays within the model's context budget.\n",
    "\n",
    "    Returns:\n",
    "        A single-element list of Document (chains such as load_summarize_chain\n",
    "        expect a list of documents).\n",
    "    \"\"\"\n",
    "    meta_data = {\"source\": video_link}\n",
    "    # The video id sits between 'watch?v=' and any following '&' query params.\n",
    "    video_id = video_link.split(\"watch?v=\")[1].split(\"&\")[0]\n",
    "    srt = YouTubeTranscriptApi.get_transcript(video_id)\n",
    "    # The transcript arrives as a list of {'text': ...} chunks; join them with\n",
    "    # spaces. Defaulting to '' guards against chunks missing the 'text' key,\n",
    "    # which would have crashed the original string concatenation with None.\n",
    "    video_text = \" \".join(chunk.get(\"text\", \"\") for chunk in srt)\n",
    "    video_text = video_text[:max_video_length]\n",
    "    return [Document(page_content=video_text, metadata=meta_data)]\n",
    "\n",
    "\n",
    "# Fixture: 3Blue1Brown's backpropagation explainer; transcript is truncated\n",
    "# to the default max_video_length (1000 characters).\n",
    "video_document  = get_text_from_youtube_link(\"https://www.youtube.com/watch?v=Ilg3gGewQ5U&ab_channel=3Blue1Brown\")\n",
    "\n",
    "print(video_document)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.chains.summarize.chain import load_summarize_chain\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "\n",
    "def prompt_template_to_analyze_resume():\n",
    "    \"\"\"Build the chat prompt used to summarize a YouTube transcript.\n",
    "\n",
    "    Returns:\n",
    "        A ChatPromptTemplate with two input variables: 'context' (the\n",
    "        transcript text) and 'input' (the user's request).\n",
    "    \"\"\"\n",
    "    template = \"\"\"\n",
    "    You are provided with the context of a YouTube link. Your task is to summarize the content\n",
    "    in a few lines and highlight the key points. Do not make up answers.\n",
    "    \\n\\n:{context}\n",
    "    \"\"\"\n",
    "    prompt = ChatPromptTemplate.from_messages(\n",
    "        [\n",
    "            ('system', template),\n",
    "            # '{input}' is a template placeholder; the original bare 'input'\n",
    "            # sent the literal word \"input\" to the model instead of the\n",
    "            # user's message.\n",
    "            ('human', '{input}'),\n",
    "        ]\n",
    "    )\n",
    "    return prompt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "\u001b[1m> Entering new StuffDocumentsChain chain...\u001b[0m\n",
      "\n",
      "\n",
      "\u001b[1m> Entering new LLMChain chain...\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\" Here, we tackle backpropagation, the core algorithm behind how neural networks learn. After a quick recap for where we are, the first thing I'll do is an intuitive walkthrough  for what the algorithm is actually doing, without any reference to the formulas. Then, for those of you who do want to dive into the math,  the next video goes into the calculus underlying all this. If you watched the last two videos, or if you're just jumping in with the appropriate  background, you know what a neural network is, and how it feeds forward information. Here, we're doing the classic example of recognizing handwritten digits whose pixel  values get fed into the first layer of the network with 784 neurons,  and I've been showing a network with two hidden layers having just 16 neurons each,  and an output layer of 10 neurons, indicating which digit the network is choosing  as its answer. I'm also expecting you to understand gradient descent,  as described in the last video, and how what we mean by l\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "\n",
      "\u001b[1m> Finished chain.\u001b[0m\n",
      "\n",
      "\u001b[1m> Finished chain.\u001b[0m\n",
      "The article discusses the algorithm of backpropagation, which is the key to how neural networks learn. It will provide an intuitive explanation of the algorithm without referencing formulas, followed by a mathematical breakdown for those who want to dive deeper. The article assumes a basic understanding of neural networks, including recognizing handwritten digits and using gradient descent.\n"
     ]
    }
   ],
   "source": [
    "# 'stuff' concatenates all documents into one prompt — safe here because the\n",
    "# transcript was truncated to max_video_length characters upstream.\n",
    "summarize_chain = load_summarize_chain(llm=llm_groq, chain_type='stuff', verbose = True )\n",
    "results = summarize_chain.invoke({'input_documents':video_document})\n",
    "print(results['output_text'])\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pylangchain",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.14"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}