migrated all files
parent 35c2ed34f3
commit 683c9af01a
Binary file not shown.
@@ -1,6 +0,0 @@
Platform: FaceApp https://www.faceapp.com
Initial release: December 31, 2016
Type: Image editing
Description: FaceApp is a mobile application for iOS and Android developed by Russian company Wireless Lab. The app generates highly realistic transformations of human faces in photographs by using neural networks based on artificial intelligence.[1][2][3] The app can transform a face to make it smile, look younger, look older, or change gender.
Original Terms of Service: https://www.faceapp.com/privacy-en.html

@@ -1,11 +0,0 @@
[('platform', 'FaceApp'), ('Type', 'Image editing'), ('Initial release', 'December 31, 2016'), ('Type', 'Image editing')]

Platform: FaceApp https://www.faceapp.com
Initial release: December 31, 2016
Type: Image editing
Description: FaceApp is a mobile application for iOS and Android developed by Russian company Wireless Lab. The app generates highly realistic transformations of human faces in photographs by using neural networks based on artificial intelligence.[1][2][3] The app can transform a face to make it smile, look younger, look older, or change gender.
Original Terms of Service: https://www.faceapp.com/privacy-en.html

@@ -1 +0,0 @@
If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account . If you permit others to use your account credentials , you are responsible for the activities of such users that occur in connection with your account .
@@ -1,24 +0,0 @@


# # for new_file in tokens_without_stopwords:
#     appendFile = open('tokenized_words.txt', 'a')
#     appendFile.write(" " + new_file)
#     appendFile.close()


# # shows only stopwords
# processed_word_list = []

# for word in tokenized:
#     # print(word)
#     if word not in all_stopwords:
#         processed_word_list.append('*')
#     else:
#         processed_word_list.append(word)
# print(processed_word_list)


# # # result putting in a graph
# top_words_plot = frequency_word.plot(10)
# print(top_words_plot)
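The commented-out masking snippet above depends on `tokenized` and `all_stopwords`, which are defined in the main script further down. A minimal self-contained sketch of the same idea (assuming only NLTK with its punkt and stopwords data, and skipping the project's custom stopwords.txt list) could look like:

import nltk
from nltk import word_tokenize
from nltk.corpus import stopwords

nltk.download('punkt')
nltk.download('stopwords')

all_stopwords = set(stopwords.words('english'))

sentence = "If you permit others to use your account credentials, you are responsible."
tokenized = word_tokenize(sentence)

# keep stopwords visible, replace everything else with '*'
processed_word_list = [word if word.lower() in all_stopwords else '*' for word in tokenized]
print(processed_word_list)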
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -1,178 +0,0 @@
from __future__ import division
import glob
from nltk import *
import re


import nltk
import codecs
from nltk import sent_tokenize, word_tokenize, pos_tag
from nltk.probability import FreqDist
from nltk.corpus import stopwords
nltk.download('stopwords')
nltk.download('punkt')                        # tokenizer models needed by sent_tokenize/word_tokenize
nltk.download('averaged_perceptron_tagger')   # tagger model needed by pos_tag

# open the txt file, read, and tokenize
file = open('faceapp.txt', 'r')
text = file.read()

# stopwords
default_stopwords = set(stopwords.words('english'))
custom_stopwords = set(codecs.open('stopwords.txt', 'r').read().splitlines())
all_stopwords = default_stopwords | custom_stopwords

print('''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title></title>
<style>

@font-face {
  font-family: "Belgika";
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-40th-webfont.eot");
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-40th-webfont.woff") format("woff"),
       url("http://bohyewoo.com/webfonts/belgika/belgika-40th-webfont.svg#filename") format("svg");
}

@font-face {
  font-family: "Belgika";
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-16th-webfont.eot");
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-16th-webfont.woff") format("woff"),
       url("http://bohyewoo.com/webfonts/belgika/belgika-16th-webfont.svg#filename") format("svg");
}

@font-face {
  font-family: "Belgika";
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-8th-webfont.eot");
  src: url("http://bohyewoo.com/webfonts/belgika/belgika-8th-webfont.woff") format("woff"),
       url("http://bohyewoo.com/webfonts/belgika/belgika-8th-webfont.svg#filename") format("svg");
}

body {
  font-family: helvetica;
  font-weight: regular;
  letter-spacing: 0.5px;
  font-size: 20px;
  line-height: 1.2;
}

.NNP {
  background-color: pink;
}

.VBP {
  background-color: gold;
}

.NN {
  background-color: LightSkyBlue;
}

.NNS {
  background-color: Aquamarine;
}

.paragraph {
  width: 70%;
  float: right;
}

.top_words {
  font-size: 9pt;
  width: 25%;
  float: left;
}

</style>
</head>
<body>''')


# my stopwords are common words I don't want to count, like "a", "an", "the".

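# each token below is printed inside a <span> whose class is its Penn Treebank POS tag,
# so words tagged NNP / VBP / NN / NNS pick up the background colours defined in the CSS above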
print('<div class ="paragraph">')
for sentence in sent_tokenize(text):
    print('<span>')

    tokenized = word_tokenize(sentence)
    tagged = pos_tag(tokenized)

    # for HTML
    for word, pos in tagged:
        print('<span class="{}">{}</span>'.format(pos, word))

    print('</span>')
print('</div>')

# filtering stopwords
tokens_without_stopwords = nltk.FreqDist(words.lower() for words in tokenized if words.lower() not in all_stopwords)
print(tokens_without_stopwords)
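# NOTE: `tokenized` at this point still holds only the tokens of the last sentence from the
# loop above, so this frequency distribution covers that final sentence rather than the whole
# text; building it from word_tokenize(text) would count every sentence.
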
# for read_whole_text in tokens_without_stopwords:
# whole_text_tokenized =
# print(whole_text_tokenized)

# #filtered words in sentence
# filtered_sentence = (" ").join(tokens_without_stopwords)
# print(filtered_sentence)

print('<div class="top_words"> colonial words:')

frequency_word = FreqDist(tokens_without_stopwords)
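# note: frequency_word just duplicates the distribution above; it is only referenced by the
# commented-out plotting lines at the end of the script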
top_words = tokens_without_stopwords.most_common(10)

for chosen_words, frequency in top_words:
    print('<br><span class="chosen_words">{}({}) </span>'.format(chosen_words, frequency))

print('''</div></body></html>''')


# for new_file in tokens_without_stopwords:
#     appendFile = open('tokenized_words.txt', 'a')
#     appendFile.write(" " + new_file)
#     appendFile.close()


# # shows only stopwords
# processed_word_list = []

# for word in tokenized:
#     # print(word)
#     if word not in all_stopwords:
#         processed_word_list.append('*')
#     else:
#         processed_word_list.append(word)
# print(processed_word_list)


# # # result putting in a graph
# top_words_plot = frequency_word.plot(10)
# print(top_words_plot)
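If the "result putting in a graph" scratch lines were ever revived, a minimal self-contained sketch of the same idea could look like the following; it uses matplotlib directly (assumed to be installed alongside NLTK) and a stand-in FreqDist, since tokens_without_stopwords only exists inside the script above.

import matplotlib.pyplot as plt
from nltk.probability import FreqDist

# stand-in distribution; in the script above this would be tokens_without_stopwords
freq = FreqDist('account credentials account users account permit'.split())

# bar chart of the (up to) ten most frequent tokens
words, counts = zip(*freq.most_common(10))
plt.bar(words, counts)
plt.xticks(rotation=45)
plt.tight_layout()
plt.savefig('top_words.png')   # hypothetical output file; plt.show() would display it instead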