# Model Card for s-nlp/g2t-t5-xl-webnlg
A T5-XL seq2seq model fine-tuned on the WebNLG dataset for graph-to-text generation: it takes a linearized set of (head, relation, tail) triplets and produces a fluent English verbalization.
## Use
```python
from typing import List, Tuple

from transformers import pipeline


def prepare_text(triplets: List[Tuple[str, str, str]]) -> str:
    """Linearize (head, relation, tail) triplets into the model's graph input format."""
    graph = "[graph]"
    for head, relation, tail in triplets:
        graph += f"[head] {head} [relation] {relation} [tail] {tail} "
    graph += "[text]</s>"
    return graph


g2t_model = pipeline(task="text2text-generation", model="s-nlp/g2t-t5-xl-webnlg")

graph = prepare_text([
    ("London", "capital_of", "United Kingdom"),
    ("London", "population", "8,799,728"),
])
g2t_model(graph)
# [{'generated_text': 'London is the capital of the United Kingdom and has a population of 8,799,7'}]
```
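
The generated sentence above is cut short by the pipeline's default generation length. Generation keyword arguments are forwarded to `generate`, so a longer limit (and, optionally, beam search) can be requested per call; the values below are illustrative, not tuned:

```python
# Raise the output length cap and use beam search; values here are examples only.
g2t_model(graph, max_length=128, num_beams=4)
```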