Amazing-Python-Scripts

import string
from collections import Counter


class model(object):

    def __init__(self):
        super().__init__()

    @staticmethod
    def get_sentimental_analysis(text):
        # Convert the input text to lowercase
        lower_case = text.lower()

        # Remove punctuation
        cleaned_text = lower_case.translate(
            str.maketrans('', '', string.punctuation))

        # Split the text into words
        tokenized_words = cleaned_text.split()

        stop_words = ["i", "me", "my", "myself", "we", "our", "ours", "ourselves",
                      "you", "your", "yours", "yourself", "yourselves", "he", "him",
                      "his", "himself", "she", "her", "hers", "herself", "it", "its",
                      "itself", "they", "them", "their", "theirs", "themselves",
                      "what", "which", "who", "whom", "this", "that", "these",
                      "those", "am", "is", "are", "was", "were", "be", "been",
                      "being", "have", "has", "had", "having", "do", "does", "did",
                      "doing", "a", "an", "the", "and", "but", "if", "or", "because",
                      "as", "until", "while", "of", "at", "by", "for", "with",
                      "about", "against", "between", "into", "through", "during",
                      "before", "after", "above", "below", "to", "from", "up",
                      "down", "in", "out", "on", "off", "over", "under", "again",
                      "further", "then", "once", "here", "there", "when", "where",
                      "why", "how", "all", "any", "both", "each", "few", "more",
                      "most", "other", "some", "such", "no", "nor", "only", "own",
                      "same", "so", "than", "too", "very", "s", "t", "can", "will",
                      "just", "don", "should", "now"]

        # Remove stop words from the tokenized word list
        final_words = []
        for word in tokenized_words:
            if word not in stop_words:
                final_words.append(word)

        # Match the remaining words against the word:emotion pairs in emotions.txt
        emotion_list = []
        with open('./Text_Sentimental_Analysis_Script_with_GUI/textFiles/emotions.txt', 'r') as file:
            for line in file:
                clear_line = line.replace("\n", '').replace(
                    ",", '').replace("'", '').strip()
                # Skip blank or malformed lines that do not contain a word:emotion pair
                if ':' not in clear_line:
                    continue
                word, emotion = clear_line.split(':')
                word, emotion = word.strip(), emotion.strip()

                if word in final_words:
                    emotion_list.append(emotion)

        if not emotion_list:
            final_emotion = ("Sorry, the entered text was not enough for a "
                             "sentiment analysis. Please try again.")
        else:
            # Report the most frequently matched emotion
            final_emotion = ("Your sentiment analysis says that your emotion is "
                             + Counter(emotion_list).most_common(1)[0][0])
        return final_emotion
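
The parsing loop above implies that emotions.txt maps one word to one emotion label per line, with quotes, a trailing comma, and a colon separator that the code strips out. The pairs below are illustrative only, not taken from the actual file shipped with the script:

'victimized': 'cheated',
'accused': 'cheated',
'cheerful': 'happy',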

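A minimal usage sketch, assuming this file is saved as model.py and the script is run from a working directory where the relative emotions.txt path above resolves; the sample sentence is made up and the exact output depends on the contents of emotions.txt:

from model import model

sample_text = "I felt happy and hopeful after reading the results"
print(model.get_sentimental_analysis(sample_text))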