changed dirs, clean code

commit f5081d174d
parent 5080ad4f63
@@ -22,12 +22,46 @@ class MWThesaurusParser(WordParser):
 [e.remove(ul) for ul in e.findall(".//ul")]
 d = ot(e)
 thes[d] = {"examples": examples}
-thes[d]["synonyms"] = [ ot(li) for li in se.findall( ".//span[@class='thes-list syn-list']/div[@class='thes-list-content synonyms_list']//li//a") ]
-thes[d]["near synonyms"] = [ ot(li) for li in se.findall( ".//span[@class='thes-list rel-list']/div[@class='thes-list-content synonyms_list']//li//a") ]
-thes[d]["near synonyms"].extend([ ot(li) for li in se.findall( ".//span[@class='thes-list sim-list']/div[@class='thes-list-content synonyms_list']//li//a") ])
-thes[d]["near antonyms"] = [ ot(li) for li in se.findall( ".//span[@class='thes-list near-list']/div[@class='thes-list-content synonyms_list']//li//a") ]
-thes[d]["near antonyms"].extend([ ot(li) for li in se.findall( ".//span[@class='thes-list opp-list']/div[@class='thes-list-content synonyms_list']//li//a") ])
-thes[d]["antonyms"] = [ ot(li) for li in se.findall( ".//span[@class='thes-list ant-list']/div[@class='thes-list-content synonyms_list']//li//a") ]
+thes[d]["synonyms"] = [
+    ot(li)
+    for li in se.findall(
+        ".//span[@class='thes-list syn-list']/div[@class='thes-list-content synonyms_list']//li//a"
+    )
+]
+thes[d]["near synonyms"] = [
+    ot(li)
+    for li in se.findall(
+        ".//span[@class='thes-list rel-list']/div[@class='thes-list-content synonyms_list']//li//a"
+    )
+]
+thes[d]["near synonyms"].extend(
+    [
+        ot(li)
+        for li in se.findall(
+            ".//span[@class='thes-list sim-list']/div[@class='thes-list-content synonyms_list']//li//a"
+        )
+    ]
+)
+thes[d]["near antonyms"] = [
+    ot(li)
+    for li in se.findall(
+        ".//span[@class='thes-list near-list']/div[@class='thes-list-content synonyms_list']//li//a"
+    )
+]
+thes[d]["near antonyms"].extend(
+    [
+        ot(li)
+        for li in se.findall(
+            ".//span[@class='thes-list opp-list']/div[@class='thes-list-content synonyms_list']//li//a"
+        )
+    ]
+)
+thes[d]["antonyms"] = [
+    ot(li)
+    for li in se.findall(
+        ".//span[@class='thes-list ant-list']/div[@class='thes-list-content synonyms_list']//li//a"
+    )
+]
 
 return thes
 
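This reformatted block is the core of the thesaurus scrape: for each sense it gathers the anchor text under the relevant thes-list span (syn-list, rel-list, sim-list, near-list, opp-list, ant-list) and maps it through ot(). Below is a minimal, self-contained sketch of that extraction pattern, assuming a simplified fragment and a stand-in ot() that merely flattens an element to text; the project's real ot() helper and the live Merriam-Webster markup may differ, and a real page would need an HTML parser such as lxml.html rather than plain XML parsing.

    # Hedged sketch: simplified markup and a stand-in ot(); not the project's real helpers.
    from xml.etree import ElementTree as ET

    FRAGMENT = """
    <entry>
      <span class="thes-list syn-list">
        <div class="thes-list-content synonyms_list">
          <ul><li><a>happy</a></li><li><a>glad</a></li></ul>
        </div>
      </span>
      <span class="thes-list ant-list">
        <div class="thes-list-content synonyms_list">
          <ul><li><a>sad</a></li></ul>
        </div>
      </span>
    </entry>
    """

    def ot(el):
        # Assumption: ot() collapses an element and its children to plain text.
        return "".join(el.itertext()).strip()

    se = ET.fromstring(FRAGMENT)
    synonyms = [
        ot(a)
        for a in se.findall(
            ".//span[@class='thes-list syn-list']/div[@class='thes-list-content synonyms_list']//li//a"
        )
    ]
    antonyms = [
        ot(a)
        for a in se.findall(
            ".//span[@class='thes-list ant-list']/div[@class='thes-list-content synonyms_list']//li//a"
        )
    ]
    print(synonyms)  # ['happy', 'glad']
    print(antonyms)  # ['sad']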
@@ -60,7 +94,7 @@ class MWThesaurusParser(WordParser):
 # print(w.todict())
 # exit()
 
-q = Queue(MWThesaurusParser, "en_MW_thesaurus/", "_mwt.json", prefix_length=2)
+q = Queue(MWThesaurusParser, "en_MWThesaurus/", "_MWT.json")
 q.loadDB()
 
 while True:
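The dropped prefix_length=2 matches the old on-disk layout that t.py (further below) reads with prefix = query[:2]: one JSON shard per two-letter prefix, named like ab_mwt.json. After the rename, the Queue is pointed at a CamelCase directory and the _MWT.json suffix instead. A rough illustration of the two naming schemes follows; the paths are examples inferred from this diff and the old t.py lookup, not from Queue's implementation, and how the new Queue splits files (if at all) is not shown here.

    # Illustrative only: layouts inferred from the old t.py lookup and this commit.
    query = "abandon"

    old_shard = f"en_MW_thesaurus/{query[:2]}_mwt.json"   # per-prefix shard: en_MW_thesaurus/ab_mwt.json
    new_dir, new_suffix = "en_MWThesaurus/", "_MWT.json"  # directory and suffix used after this commit

    print(old_shard)
    print(new_dir, new_suffix)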
@@ -1,7 +1,6 @@
 # import matplotlib.pyplot as plt
 # from PIL import Image
 # from wordcloud import STOPWORDS, WordCloud
-from dict_dl import fulldictionary
 
 d = FullDictionary("en_merriam_webster/", "_mw.json")
 # d = Dictionary("en_MW_thesaurus/", "_mwt.json")
@@ -23,6 +22,7 @@ with open(f"{d.dir_prefix}redo", "at") as f:
     f.write("\n".join(list(again)))
 exit()
 
+
 def grey_color_func(
     word, font_size, position, orientation, random_state=None, **kwargs
 ):
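The reformatted grey_color_func signature follows the wordcloud library's color_func convention: the callable receives word, font_size, position, orientation, random_state and keyword extras and returns a color string. Its body lies outside this hunk; the completion below is only an illustrative sketch in that convention, not the code from this commit.

    import random

    def grey_color_func(
        word, font_size, position, orientation, random_state=None, **kwargs
    ):
        # Illustrative body (not from this commit): pick a random grey tone.
        if random_state is None:
            random_state = random.Random()
        return f"hsl(0, 0%, {random_state.randint(60, 100)}%)"

    # Typical use, assuming the commented-out wordcloud import above is re-enabled:
    # wc = WordCloud(stopwords=STOPWORDS).generate(text)
    # wc.recolor(color_func=grey_color_func, random_state=3)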
duden.py (9 changed lines)
@@ -81,7 +81,8 @@ class DudenParser(WordParser):
 assert (
     self.type or self.definitions
 ), f"{self.time} {self.word}: type or definitions came back empty..."
-return uqall({
+return uqall(
+    {
         self.word: {
             "type": self.type,
             "definitions": self.definitions,
@@ -91,14 +92,16 @@ class DudenParser(WordParser):
             "wendungen": self.wendungen,
             "time_of_retrieval": self.time,
         }
-})
+    }
+)
 
+
 # d = DudenParser("hinfallen")
 # print(d.neighbours)
 # print(d.todict())
 # exit()
 
-q = Queue(DudenParser, "de_duden/", "_duden.json")
+q = Queue(DudenParser, "de_Duden/", "_D.json")
 q.loadDB()
 
 while True:
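DudenParser above, like MerriamWebsterParser in the next hunk, hands its nested entry dict to uqall() before returning it. uqall() is defined elsewhere in dict_dl and is not part of this diff; judging only by its name and how it is called, it appears to walk the structure and unquote/unescape the scraped strings. Purely as a hypothetical stand-in to show the shape of that data flow (the real helper may well behave differently):

    import html

    def uqall(obj):
        # Hypothetical stand-in: recursively HTML-unescape every string in a
        # nested dict/list structure. The repo's real uqall() may differ.
        if isinstance(obj, dict):
            return {uqall(k): uqall(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [uqall(v) for v in obj]
        if isinstance(obj, str):
            return html.unescape(obj)
        return obj

    print(uqall({"t&auml;glich": ["daily", "&quot;every day&quot;"]}))
    # {'täglich': ['daily', '"every day"']}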
@@ -108,7 +108,8 @@ class MerriamWebsterParser(WordParser):
 assert (
     self.type or self.definitions
 ), f"{self.time} {self.word}: type or definitions came back empty..."
-return uqall({
+return uqall(
+    {
         self.word: {
             "type": self.type,
             "definitions": self.definitions,
@@ -121,7 +122,8 @@ class MerriamWebsterParser(WordParser):
             "first_known_use": self.first_known_use,
             "time_of_retrieval": self.time,
         }
-})
+    }
+)
 
 
 # testword = "revivalist"
@@ -132,7 +134,7 @@ class MerriamWebsterParser(WordParser):
 # print(f"### {k} ###\n", v)
 # exit()
 
-q = Queue(MerriamWebsterParser, "en_merriam_webster/", "_mw.json", prefix_length=2)
+q = Queue(MerriamWebsterParser, "en_MerriamWebster/", "_MW.json")
 q.loadDB()
 
 while True:
t.py (19 changed lines)
@@ -10,11 +10,22 @@ prefix = query[:2]
 
 d = Dictionary("en_MW_thesaurus/", f"{prefix}_mwt.json")
 print(f"### {query:<70}###")
-print("================================================================================")
+print(
+    "================================================================================"
+)
 for k, v in d[query].items():
     if k != "type":
         print(f" {k}")
-        print("--------------------------------------------------------------------------------")
-        for ka in ["synonyms", "related" if "related" in v else "near synonyms", "near antonyms", "antonyms"]:
+        print(
+            "--------------------------------------------------------------------------------"
+        )
+        for ka in [
+            "synonyms",
+            "related" if "related" in v else "near synonyms",
+            "near antonyms",
+            "antonyms",
+        ]:
             print(f"{ka:^13}: {' | '.join(v[ka])}")
-        print("--------------------------------------------------------------------------------")
+        print(
+            "--------------------------------------------------------------------------------"
+        )
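The lookup script leans on f-string alignment specifiers for its layout: {query:<70} left-aligns the headword in a 70-character field and {ka:^13} centres the relation name in a 13-character field. A standalone reminder of how those format specs behave (the names and values here are examples, not taken from the data files):

    query = "abandon"
    ka = "synonyms"
    words = ["desert", "forsake", "quit"]

    print(f"### {query:<70}###")             # headword left-aligned, padded to 70 characters
    print(f"{ka:^13}: {' | '.join(words)}")  # prints '  synonyms   : desert | forsake | quit'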