Skip to content

Instantly share code, notes, and snippets.

import wikipediaapi
import pandas as pd
def wiki_page(page_name):
    """Fetch an English-Wikipedia page by title.

    Parameters
    ----------
    page_name : str
        Title of the Wikipedia page to fetch.

    Returns
    -------
    wikipediaapi.WikipediaPage or None
        The page object when the page exists; None (after printing a
        message) when it does not.
    """
    wiki_api = wikipediaapi.Wikipedia(language='en',
                                      extract_format=wikipediaapi.ExtractFormat.WIKI)
    # Bind the fetched page to its own name instead of shadowing the
    # `page_name` parameter, and hand it back to the caller.
    page = wiki_api.page(page_name)
    if not page.exists():
        print('page does not exist')
        return None
    return page
import hmni

# Initialize a name Matcher trained on Latin-script names.
matcher = hmni.Matcher(model='latin')

# Probabilistic similarity for a single pair of names.
pair_score = matcher.similarity('Alan', 'Al')
# expected ~0.6838301782536617

# Hard (binary) match decision for the same pair.
pair_match = matcher.similarity('Alan', 'Al', prob=False)
# expected 1
import pandas as pd
import re
import spacy
import neuralcoref
# Load spaCy's large English model and attach neuralcoref's coreference
# resolver to its pipeline, so documents produced by `nlp(...)` carry
# resolved coreference clusters (used downstream for entity extraction).
nlp = spacy.load('en_core_web_lg')
neuralcoref.add_to_pipe(nlp)
def entity_pairs(text, coref=True):
import networkx as nx
import matplotlib.pyplot as plt
def draw_kg(pairs):
k_graph = nx.from_pandas_edgelist(pairs, 'subject', 'object',
create_using=nx.MultiDiGraph())
node_deg = nx.degree(k_graph)
layout = nx.spring_layout(k_graph, k=0.15, iterations=20)
plt.figure(num=None, figsize=(120, 90), dpi=80)