import os
import random
import shutil
from pathlib import Path

import spacy
from spacy.tokens import DocBin
from spacy.training import Example
from spacy.util import minibatch, compounding


def load_data_from_spacy_file(file_path):
    """Load training Docs from a serialized .spacy (DocBin) file."""
    nlp = spacy.blank("en")

    try:
        doc_bin = DocBin().from_disk(file_path)
        docs = list(doc_bin.get_docs(nlp.vocab))
        return docs
    except Exception as e:
        print(f"Error loading data from .spacy file: {e}")
        return []

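
# Sketch (not part of the original script): one way a training file such as
# "./data/Spacy_data.spacy" could be produced from annotated (text, entities)
# pairs. The helper name, sample text, and character offsets below are purely
# illustrative assumptions.
def build_spacy_file_sketch(output_path="./data/Spacy_data.spacy"):
    nlp = spacy.blank("en")
    doc_bin = DocBin()
    samples = [
        ("Jane Doe works at Google in London.",
         [(0, 8, "PERSON"), (18, 24, "COMPANY"), (28, 34, "LOCATION")]),
    ]
    for text, entities in samples:
        doc = nlp.make_doc(text)
        spans = [doc.char_span(start, end, label=label) for start, end, label in entities]
        # Drop any span that does not align with token boundaries.
        doc.ents = [span for span in spans if span is not None]
        doc_bin.add(doc)
    doc_bin.to_disk(output_path)
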

def train_model(epochs, model_path):
    nlp = spacy.blank("en")

    # Add the NER component (or reuse it if already present) plus a sentencizer.
    if "ner" not in nlp.pipe_names:
        ner = nlp.add_pipe("ner")
    else:
        ner = nlp.get_pipe("ner")
    nlp.add_pipe("sentencizer")

    labels = [
        "PERSON", "CONTACT", "EMAIL", "ABOUT", "EXPERIENCE", "YEARS_EXPERIENCE",
        "UNIVERSITY", "SOFT_SKILL", "INSTITUTE", "LAST_QUALIFICATION_YEAR", "JOB_TITLE",
        "COMPANY", "COURSE", "DOB", "HOBBIES", "LINK", "SCHOOL", "QUALIFICATION",
        "LANGUAGE", "LOCATION", "PROJECTS", "SKILL", "CERTIFICATE",
    ]
    for label in labels:
        ner.add_label(label)

    train_data = load_data_from_spacy_file("./data/Spacy_data.spacy")

    optimizer = nlp.begin_training()

    epoch_losses = []
    best_loss = float("inf")

    for epoch in range(epochs):
        losses = {}
        random.shuffle(train_data)

        batches = minibatch(train_data, size=compounding(4.0, 32.0, 1.001))
        for batch in batches:
            texts, annotations = zip(*[
                (
                    doc.text,
                    {"entities": [(ent.start_char, ent.end_char, ent.label_) for ent in doc.ents]},
                )
                for doc in batch
            ])
            examples = [
                Example.from_dict(nlp.make_doc(text), annotation)
                for text, annotation in zip(texts, annotations)
            ]
            nlp.update(examples, sgd=optimizer, drop=0.35, losses=losses)

        current_loss = losses.get("ner", float("inf"))
        epoch_losses.append(current_loss)
        print(f"Losses at epoch {epoch + 1}: {losses}")

        if current_loss == 0:
            break

        # Checkpoint whenever this epoch improves on the best loss so far.
        if current_loss < best_loss:
            best_loss = current_loss

            temp_model_path = model_path + "_temp"
            nlp.to_disk(temp_model_path)

            # Swap the new checkpoint in place of the previous best model.
            if os.path.exists(model_path):
                shutil.rmtree(model_path)
            shutil.copytree(temp_model_path, model_path)
            shutil.rmtree(temp_model_path)

    # Save the final-epoch model (this overwrites the best-loss checkpoint above).
    nlp.to_disk(model_path)

    return epoch_losses
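

# Minimal usage sketch (not part of the original script): the epoch count,
# output path, and sample text below are placeholder assumptions, not values
# taken from the original.
if __name__ == "__main__":
    losses_per_epoch = train_model(epochs=30, model_path="./model/trained_ner_model")
    print(f"NER loss per epoch: {losses_per_epoch}")

    # Quick sanity check with the saved model.
    trained_nlp = spacy.load("./model/trained_ner_model")
    doc = trained_nlp("Jane Doe has five years of experience as a Data Scientist at Acme.")
    print([(ent.text, ent.label_) for ent in doc.ents])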