Mirror of https://github.com/Monadical-SAS/reflector.git (synced 2025-12-21 20:59:05 +00:00)
fix imports
@@ -1,10 +1,10 @@
 import configparser
 import sys
 
 import boto3
 import botocore
-from run_utils import config
 
 from log_utils import logger
+from run_utils import config
 
 BUCKET_NAME = config["DEFAULT"]["BUCKET_NAME"]
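The hunk above belongs to a module that reads BUCKET_NAME from the shared config and imports boto3 and botocore. As a hedged illustration only, here is a minimal sketch of how a module with these imports might use them; the upload_file helper, its logging, and the error handling are assumptions, not code from the repository.

# Hypothetical sketch only -- not code from the reflector repository.
# It reuses the config/logger imports shown in the diff above; the helper
# itself and its behaviour are assumptions.
import boto3
import botocore

from log_utils import logger
from run_utils import config

BUCKET_NAME = config["DEFAULT"]["BUCKET_NAME"]


def upload_file(local_path: str, key: str) -> bool:
    """Upload a local file to the configured S3 bucket (illustrative helper)."""
    s3 = boto3.client("s3")
    try:
        s3.upload_file(local_path, BUCKET_NAME, key)
        logger.info(f"Uploaded {local_path} to s3://{BUCKET_NAME}/{key}")
        return True
    except botocore.exceptions.ClientError as exc:
        # botocore.exceptions is loaded as a side effect of importing boto3.
        logger.error(f"Upload of {local_path} failed: {exc}")
        return False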
@@ -1,5 +1,3 @@
-import configparser
-
 import nltk
 import torch
 from nltk.corpus import stopwords
@@ -7,13 +5,13 @@ from nltk.tokenize import word_tokenize
 from sklearn.feature_extraction.text import TfidfVectorizer
 from sklearn.metrics.pairwise import cosine_similarity
 from transformers import BartForConditionalGeneration, BartTokenizer
-from run_utils import config
 
 from log_utils import logger
+from run_utils import config
 
 nltk.download('punkt', quiet=True)
 
 
 def preprocess_sentence(sentence):
     stop_words = set(stopwords.words('english'))
     tokens = word_tokenize(sentence.lower())
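The second file's imports (NLTK, scikit-learn's TfidfVectorizer and cosine_similarity, and BART from transformers) point to a sentence-similarity and summarization pipeline, and the hunk cuts off inside preprocess_sentence. The sketch below is an assumption about how that preprocessing and similarity step could look: everything past the two visible lines of preprocess_sentence, and the sentence_similarity_matrix helper, are hypothetical and not the repository's implementation.

# Hypothetical sketch only -- not the reflector implementation.
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

nltk.download('punkt', quiet=True)
# The diff only downloads 'punkt'; the stopwords corpus is fetched here so the
# sketch runs standalone.
nltk.download('stopwords', quiet=True)


def preprocess_sentence(sentence):
    # Lowercase and tokenize (the two lines visible in the diff), then drop
    # punctuation and English stop words -- the filtering is an assumption.
    stop_words = set(stopwords.words('english'))
    tokens = word_tokenize(sentence.lower())
    return " ".join(t for t in tokens if t.isalnum() and t not in stop_words)


def sentence_similarity_matrix(sentences):
    # Hypothetical helper: TF-IDF vectors plus pairwise cosine similarity,
    # matching the sklearn imports in the diff.
    vectorizer = TfidfVectorizer()
    tfidf = vectorizer.fit_transform([preprocess_sentence(s) for s in sentences])
    return cosine_similarity(tfidf)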