embedding?
- App/Embedding/EmbeddingRoutes.py +3 -1
- App/Worker.py +0 -1
- App/__init__.py +9 -23
- App/app.py +3 -2
- requirements.txt +1 -0
App/Embedding/EmbeddingRoutes.py
CHANGED
@@ -2,6 +2,7 @@ from fastapi import APIRouter
 from App.Transcription.Model import Transcriptions
 from .utils.Initialize import generateChunks, encode, search
 from .Schemas import SearchRequest
+
 embeddigs_router = APIRouter(tags=["embeddings"])
 
 
@@ -15,6 +16,7 @@ async def create_embeddings(task_id):
 
     return
 
+
 @embeddigs_router.get("/create_summary")
 async def create_summary(task_id):
     item = await Transcriptions.objects.filter(task_id=task_id).first()
@@ -28,5 +30,5 @@ async def create_summary(task_id):
 # search
 # update?
 @embeddigs_router.post("/search_embeddings")
-async def search_embeddings(req:SearchRequest):
+async def search_embeddings(req: SearchRequest):
     return search(query=req.query, task_id=req.taskId)
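The SearchRequest model imported from .Schemas is not part of this commit. Going only by the req.query and req.taskId accesses in search_embeddings, a minimal Pydantic sketch of that schema might look like this:

# Hypothetical sketch of the SearchRequest model from .Schemas (not shown in this diff);
# field names are inferred from how search_embeddings reads the request.
from pydantic import BaseModel

class SearchRequest(BaseModel):
    query: str   # free-text query to run against the stored embeddings
    taskId: str  # transcription task whose embeddings should be searched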
App/Worker.py
CHANGED
@@ -25,7 +25,6 @@ def transcription_task(self, file_path, model_size="tiny"):
     return data
 
 
-
 @celery.task(name="download", bind=True)
 def downloadfile(self, url, ydl_opts, model_size="base"):
     # updated
App/__init__.py
CHANGED
@@ -1,27 +1,13 @@
 from telethon import TelegramClient
 from telethon.sessions import StringSession
+import os
 
+TELEGRAM_SESSION = os.environ.get("TELEGRAM_SESSION")
 
-
-
-
-
-
-
-
-
-bot:TelegramClient = TelegramClient(
-    StringSession('1BVtsOIwBuxTiAi57fIIpM_mCJiic5U36YhvLughpTdbevbbipZPNIeTzhTi6KvPdigX9zFnvrLzFDthiat20fy44fpMuZMAKNO5mFg1LCGus1PeiQAOqhCO0LZbmBG3BNB5vgbuTJL9TgTUeY-QcFXlUVABwhYO9UiDBkS7IZCNa9eRsYBEVhXUc6O4kENQUerCCAwWFxAx9mxCkh7ADWsw5bNptl1pNvcdaUes6Zq_GW1eraTZ4mR2piiE3vyoqfzuRy40apmzvyFVkgtyD4p5-MCoIdVoS5zQONP2GA6G5Bw7Z2YOWI5D_Y51DxjQICnS2UDMLxqKhaqkUKeMR5GC-5RyNKVw='),
-    api_id=870972,
-    api_hash="ce2efaca02dfcd110941be6025e9ac0d",
-)
-
-
-
-# upload_bot = Client(
-#     str("the book"),
-#     api_id=870972,
-#     api_hash="ce2efaca02dfcd110941be6025e9ac0d",
-#     bot_token="6183919505:AAEhHFt4mI18bQeAf2Lj7AePXFRPVLrOFM8",
-#     workers=9,
-# )
+bot: TelegramClient = TelegramClient(
+    StringSession(
+        TELEGRAM_SESSION
+    ),
+    api_id=870972,
+    api_hash="ce2efaca02dfcd110941be6025e9ac0d",
+)
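This change swaps the hard-coded StringSession for a TELEGRAM_SESSION environment variable. As a sketch outside this repo, the usual Telethon way to generate that string, reusing the api_id and api_hash already in the file, is:

# Sketch: generating the StringSession value expected in TELEGRAM_SESSION.
# Run once locally; Telethon prompts for the phone number and login code.
from telethon.sync import TelegramClient
from telethon.sessions import StringSession

with TelegramClient(StringSession(), 870972, "ce2efaca02dfcd110941be6025e9ac0d") as client:
    print(client.session.save())  # export this value as TELEGRAM_SESSION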
App/app.py
CHANGED
@@ -10,7 +10,8 @@ from .Transcription.TranscriptionRoutes import transcription_router
 from .Streaming.StreamingRoutes import streaming_router
 from .UserTranscriptions.UserTranscriptionsRoutes import user_transcriptions_router
 from .Monitor.monitorRoutes import monitor_router
-
+
+from .Embedding.EmbeddingRoutes import embeddigs_router
 from .Chat.ChatRoutes import chat_router
 
 from fastapi.middleware.cors import CORSMiddleware
@@ -78,5 +79,5 @@ app.include_router(transcription_router)
 app.include_router(streaming_router)
 app.include_router(monitor_router)
 app.include_router(user_transcriptions_router)
-
+app.include_router(embeddigs_router)
 # app.include_router(chat_router)
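With embeddigs_router now mounted on the app, the new search endpoint can be exercised end to end. A sketch using FastAPI's TestClient, assuming the FastAPI instance in App/app.py is named app and the request fields match SearchRequest as used above:

# Sketch: calling the newly wired /search_embeddings route; query and task id are placeholders.
from fastapi.testclient import TestClient
from App.app import app

client = TestClient(app)
resp = client.post(
    "/search_embeddings",
    json={"query": "what was said about pricing?", "taskId": "abc123"},
)
print(resp.status_code, resp.json())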
requirements.txt
CHANGED
@@ -40,4 +40,5 @@ google-generativeai
 openai
 tiktoken
 langchain
+pinecone-client[grpc]
 poe-api-wrapper[proxy]
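pinecone-client[grpc] is added to back the embedding search. The repo's own encode() and search() helpers in .utils.Initialize are not shown in this commit, so the following is only a generic sketch of the pinecone-client 2.x API that the dependency provides; the index name, environment, vector size, and task_id metadata filter are assumptions:

# Generic pinecone-client 2.x sketch (index name, environment, dimension and
# metadata layout are assumptions, not taken from this repo).
import pinecone

pinecone.init(api_key="YOUR_PINECONE_API_KEY", environment="us-west1-gcp")
index = pinecone.Index("transcriptions")  # pinecone.GRPCIndex(...) also works with the [grpc] extra

# Upsert one embedding tagged with the transcription task it belongs to.
index.upsert(vectors=[("chunk-0", [0.1] * 1536, {"task_id": "abc123", "text": "chunk text"})])

# Search within a single task by filtering on the task_id metadata field.
matches = index.query(
    vector=[0.1] * 1536,
    top_k=5,
    filter={"task_id": "abc123"},
    include_metadata=True,
)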