// NOTE(review): Compiled, minified SvelteKit page module generated by the Hugging Face
// docs build for docs/source/transformers_integrations.mdx (the "🤗 Transformers" page
// of the `evaluate` docs — see the `source:` URL passed to `ns` below). Do not
// hand-edit: change the source .mdx and rebuild instead. A behavior-preserving restyle
// is unsafe here (the single-letter bindings are positional compiler output), so this
// pass only adds comments; every executable token is unchanged. Several string
// literals below span raw line breaks exactly as emitted by the doc compiler.
import{s as P,n as O,o as ss}from"../chunks/scheduler.7da89386.js";import{S as ls,i as es,g as T,s as t,r as j,A as as,h as b,f as e,c as n,j as K,u as U,x as v,k as D,y as ts,a,v as w,d as Z,t as W,w as k}from"../chunks/index.20910acc.js";import{C as x}from"../chunks/CodeBlock.143bd81e.js";import{H as S,E as ns}from"../chunks/getInferenceSnippets.7cf363b6.js";
// rs — Svelte "create_fragment" factory for this page. Instantiates the heading
// components (S), the two CodeBlock components (x) whose `code` prop is a
// base64(URL-encoded) copy of the snippet shown in `highlighted`, and the
// "edit on GitHub"-style footer (ns), then returns the fragment lifecycle
// object {c,l,h,m,p,i,o,d}.
// NOTE(review): the prose assigned to `$` says "compute_metric" although the
// snippet defines "compute_metrics" — typo inherited from the source .mdx.
// It is runtime innerHTML, not a comment, so it must be fixed upstream.
function rs(q){let r,V,G,f,p,I,c,Q="To run the 🤗 Transformers examples make sure you have installed the following libraries:",X,M,R,i,_,o,A='The metrics in evaluate
can be easily integrated with the Trainer. The Trainer
accepts a compute_metrics
keyword argument that passes a function to compute metrics. One can specify the evaluation interval with evaluation_strategy
in the TrainerArguments
, and based on that, the model is evaluated accordingly, and the predictions and labels passed to compute_metrics
.',C,d,E,m,F,y,$='We can use the Seq2SeqTrainer for sequence-to-sequence tasks such as translation or summarization. For such generative tasks usually metrics such as ROUGE or BLEU are evaluated. However, these metrics require that we generate some text with the model rather than a single forward pass as with e.g. classification. The Seq2SeqTrainer
allows for the use of the generate method when setting predict_with_generate=True
which will generate text for each sample in the evaluation set. That means we evaluate generated text within the compute_metric
function. We just need to decode the predictions and labels first.',g,J,z,u,L="You can use any evaluate
metric with the Trainer
and Seq2SeqTrainer
as long as they are compatible with the task and predictions. In case you don’t want to train a model but just evaluate an existing model you can replace trainer.train()
with trainer.evaluate()
in the above scripts.",Y,h,N,B,H;return p=new S({props:{title:"🤗 Transformers",local:"-transformers",headingTag:"h1"}}),M=new x({props:{code:"cGlwJTIwaW5zdGFsbCUyMGRhdGFzZXRzJTIwdHJhbnNmb3JtZXJzJTIwdG9yY2glMjBldmFsdWF0ZSUyMG5sdGslMjByb3VnZV9zY29yZQ==",highlighted:"pip install datasets transformers torch evaluate nltk rouge_score",wrap:!1}}),i=new S({props:{title:"Trainer",local:"trainer",headingTag:"h2"}}),d=new x({props:{code:"ZnJvbSUyMGRhdGFzZXRzJTIwaW1wb3J0JTIwbG9hZF9kYXRhc2V0JTBBZnJvbSUyMHRyYW5zZm9ybWVycyUyMGltcG9ydCUyMEF1dG9Ub2tlbml6ZXIlMkMlMjBBdXRvTW9kZWxGb3JTZXF1ZW5jZUNsYXNzaWZpY2F0aW9uJTJDJTIwVHJhaW5pbmdBcmd1bWVudHMlMkMlMjBUcmFpbmVyJTBBaW1wb3J0JTIwbnVtcHklMjBhcyUyMG5wJTBBaW1wb3J0JTIwZXZhbHVhdGUlMEElMEElMjMlMjBQcmVwYXJlJTIwYW5kJTIwdG9rZW5pemUlMjBkYXRhc2V0JTBBZGF0YXNldCUyMCUzRCUyMGxvYWRfZGF0YXNldCglMjJ5ZWxwX3Jldmlld19mdWxsJTIyKSUwQXRva2VuaXplciUyMCUzRCUyMEF1dG9Ub2tlbml6ZXIuZnJvbV9wcmV0cmFpbmVkKCUyMmJlcnQtYmFzZS1jYXNlZCUyMiklMEElMEFkZWYlMjB0b2tlbml6ZV9mdW5jdGlvbihleGFtcGxlcyklM0ElMEElMjAlMjAlMjAlMjByZXR1cm4lMjB0b2tlbml6ZXIoZXhhbXBsZXMlNUIlMjJ0ZXh0JTIyJTVEJTJDJTIwcGFkZGluZyUzRCUyMm1heF9sZW5ndGglMjIlMkMlMjB0cnVuY2F0aW9uJTNEVHJ1ZSklMEElMEF0b2tlbml6ZWRfZGF0YXNldHMlMjAlM0QlMjBkYXRhc2V0Lm1hcCh0b2tlbml6ZV9mdW5jdGlvbiUyQyUyMGJhdGNoZWQlM0RUcnVlKSUwQSUwQXNtYWxsX3RyYWluX2RhdGFzZXQlMjAlM0QlMjB0b2tlbml6ZWRfZGF0YXNldHMlNUIlMjJ0cmFpbiUyMiU1RC5zaHVmZmxlKHNlZWQlM0Q0Mikuc2VsZWN0KHJhbmdlKDIwMCkpJTBBc21hbGxfZXZhbF9kYXRhc2V0JTIwJTNEJTIwdG9rZW5pemVkX2RhdGFzZXRzJTVCJTIydGVzdCUyMiU1RC5zaHVmZmxlKHNlZWQlM0Q0Mikuc2VsZWN0KHJhbmdlKDIwMCkpJTBBJTBBJTIzJTIwU2V0dXAlMjBldmFsdWF0aW9uJTIwJTBBbWV0cmljJTIwJTNEJTIwZXZhbHVhdGUubG9hZCglMjJhY2N1cmFjeSUyMiklMEElMEFkZWYlMjBjb21wdXRlX21ldHJpY3MoZXZhbF9wcmVkKSUzQSUwQSUyMCUyMCUyMCUyMGxvZ2l0cyUyQyUyMGxhYmVscyUyMCUzRCUyMGV2YWxfcHJlZCUwQSUyMCUyMCUyMCUyMHByZWRpY3Rpb25zJTIwJTNEJTIwbnAuYXJnbWF4KGxvZ2l0cyUyQyUyMGF4aXMlM0QtMSklMEElMjAlMjAlMjAlMjByZXR1cm4lMjBtZXRyaWMuY29tcHV0ZShwcmVkaWN0aW9ucyUzRHByZWRpY3Rpb25zJTJDJTIwcmVmZXJlbmNlcyUzRGxhYmVscyklMEElMEElMjMlMjBMb2Fk
JTIwcHJldHJhaW5lZCUyMG1vZGVsJTIwYW5kJTIwZXZhbHVhdGUlMjBtb2RlbCUyMGFmdGVyJTIwZWFjaCUyMGVwb2NoJTBBbW9kZWwlMjAlM0QlMjBBdXRvTW9kZWxGb3JTZXF1ZW5jZUNsYXNzaWZpY2F0aW9uLmZyb21fcHJldHJhaW5lZCglMjJiZXJ0LWJhc2UtY2FzZWQlMjIlMkMlMjBudW1fbGFiZWxzJTNENSklMEF0cmFpbmluZ19hcmdzJTIwJTNEJTIwVHJhaW5pbmdBcmd1bWVudHMob3V0cHV0X2RpciUzRCUyMnRlc3RfdHJhaW5lciUyMiUyQyUyMGV2YWx1YXRpb25fc3RyYXRlZ3klM0QlMjJlcG9jaCUyMiklMEElMEF0cmFpbmVyJTIwJTNEJTIwVHJhaW5lciglMEElMjAlMjAlMjAlMjBtb2RlbCUzRG1vZGVsJTJDJTBBJTIwJTIwJTIwJTIwYXJncyUzRHRyYWluaW5nX2FyZ3MlMkMlMEElMjAlMjAlMjAlMjB0cmFpbl9kYXRhc2V0JTNEc21hbGxfdHJhaW5fZGF0YXNldCUyQyUwQSUyMCUyMCUyMCUyMGV2YWxfZGF0YXNldCUzRHNtYWxsX2V2YWxfZGF0YXNldCUyQyUwQSUyMCUyMCUyMCUyMGNvbXB1dGVfbWV0cmljcyUzRGNvbXB1dGVfbWV0cmljcyUyQyUwQSklMEElMEF0cmFpbmVyLnRyYWluKCk=",highlighted:`from datasets import load_dataset
from transformers import AutoTokenizer, AutoModelForSequenceClassification, TrainingArguments, Trainer
import numpy as np
import evaluate
dataset = load_dataset("yelp_review_full")
tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")
def tokenize_function(examples):
return tokenizer(examples["text"], padding="max_length", truncation=True)
tokenized_datasets = dataset.map(tokenize_function, batched=True)
small_train_dataset = tokenized_datasets["train"].shuffle(seed=42).select(range(200))
small_eval_dataset = tokenized_datasets["test"].shuffle(seed=42).select(range(200))
metric = evaluate.load("accuracy")
def compute_metrics(eval_pred):
logits, labels = eval_pred
predictions = np.argmax(logits, axis=-1)
return metric.compute(predictions=predictions, references=labels)
model = AutoModelForSequenceClassification.from_pretrained("bert-base-cased", num_labels=5)
training_args = TrainingArguments(output_dir="test_trainer", evaluation_strategy="epoch")
trainer = Trainer(
model=model,
args=training_args,
train_dataset=small_train_dataset,
eval_dataset=small_eval_dataset,
compute_metrics=compute_metrics,
)
trainer.train()`,wrap:!1}}),m=new S({props:{title:"Seq2SeqTrainer",local:"seq2seqtrainer",headingTag:"h2"}}),J=new x({props:{code:"aW1wb3J0JTIwbmx0ayUwQWZyb20lMjBkYXRhc2V0cyUyMGltcG9ydCUyMGxvYWRfZGF0YXNldCUwQWltcG9ydCUyMGV2YWx1YXRlJTBBaW1wb3J0JTIwbnVtcHklMjBhcyUyMG5wJTBBZnJvbSUyMHRyYW5zZm9ybWVycyUyMGltcG9ydCUyMEF1dG9Ub2tlbml6ZXIlMkMlMjBEYXRhQ29sbGF0b3JGb3JTZXEyU2VxJTBBZnJvbSUyMHRyYW5zZm9ybWVycyUyMGltcG9ydCUyMEF1dG9Nb2RlbEZvclNlcTJTZXFMTSUyQyUyMFNlcTJTZXFUcmFpbmluZ0FyZ3VtZW50cyUyQyUyMFNlcTJTZXFUcmFpbmVyJTBBJTBBJTIzJTIwUHJlcGFyZSUyMGFuZCUyMHRva2VuaXplJTIwZGF0YXNldCUwQWJpbGxzdW0lMjAlM0QlMjBsb2FkX2RhdGFzZXQoJTIyYmlsbHN1bSUyMiUyQyUyMHNwbGl0JTNEJTIyY2FfdGVzdCUyMikuc2h1ZmZsZShzZWVkJTNENDIpLnNlbGVjdChyYW5nZSgyMDApKSUwQWJpbGxzdW0lMjAlM0QlMjBiaWxsc3VtLnRyYWluX3Rlc3Rfc3BsaXQodGVzdF9zaXplJTNEMC4yKSUwQXRva2VuaXplciUyMCUzRCUyMEF1dG9Ub2tlbml6ZXIuZnJvbV9wcmV0cmFpbmVkKCUyMnQ1LXNtYWxsJTIyKSUwQXByZWZpeCUyMCUzRCUyMCUyMnN1bW1hcml6ZSUzQSUyMCUyMiUwQSUwQWRlZiUyMHByZXByb2Nlc3NfZnVuY3Rpb24oZXhhbXBsZXMpJTNBJTBBJTIwJTIwJTIwJTIwaW5wdXRzJTIwJTNEJTIwJTVCcHJlZml4JTIwJTJCJTIwZG9jJTIwZm9yJTIwZG9jJTIwaW4lMjBleGFtcGxlcyU1QiUyMnRleHQlMjIlNUQlNUQlMEElMjAlMjAlMjAlMjBtb2RlbF9pbnB1dHMlMjAlM0QlMjB0b2tlbml6ZXIoaW5wdXRzJTJDJTIwbWF4X2xlbmd0aCUzRDEwMjQlMkMlMjB0cnVuY2F0aW9uJTNEVHJ1ZSklMEElMEElMjAlMjAlMjAlMjBsYWJlbHMlMjAlM0QlMjB0b2tlbml6ZXIodGV4dF90YXJnZXQlM0RleGFtcGxlcyU1QiUyMnN1bW1hcnklMjIlNUQlMkMlMjBtYXhfbGVuZ3RoJTNEMTI4JTJDJTIwdHJ1bmNhdGlvbiUzRFRydWUpJTBBJTBBJTIwJTIwJTIwJTIwbW9kZWxfaW5wdXRzJTVCJTIybGFiZWxzJTIyJTVEJTIwJTNEJTIwbGFiZWxzJTVCJTIyaW5wdXRfaWRzJTIyJTVEJTBBJTIwJTIwJTIwJTIwcmV0dXJuJTIwbW9kZWxfaW5wdXRzJTBBJTBBdG9rZW5pemVkX2JpbGxzdW0lMjAlM0QlMjBiaWxsc3VtLm1hcChwcmVwcm9jZXNzX2Z1bmN0aW9uJTJDJTIwYmF0Y2hlZCUzRFRydWUpJTBBJTBBJTIzJTIwU2V0dXAlMjBldmFsdWF0aW9uJTBBbmx0ay5kb3dubG9hZCglMjJwdW5rdF90YWIlMjIlMkMlMjBxdWlldCUzRFRydWUpJTBBbWV0cmljJTIwJTNEJTIwZXZhbHVhdGUubG9hZCglMjJyb3VnZSUyMiklMEElMEFkZWYlMjBjb21wdXRlX21ldHJpY3MoZXZhbF9wcmVkcyklM0ElMEElMjAlMjAlMjAlMjBwcmVkcyUyQyUyMGxhYmVscyUyMCUzRCUyMGV2YWxfcHJlZHMlMEElM
EElMjAlMjAlMjAlMjAlMjMlMjBkZWNvZGUlMjBwcmVkcyUyMGFuZCUyMGxhYmVscyUwQSUyMCUyMCUyMCUyMGxhYmVscyUyMCUzRCUyMG5wLndoZXJlKGxhYmVscyUyMCElM0QlMjAtMTAwJTJDJTIwbGFiZWxzJTJDJTIwdG9rZW5pemVyLnBhZF90b2tlbl9pZCklMEElMjAlMjAlMjAlMjBkZWNvZGVkX3ByZWRzJTIwJTNEJTIwdG9rZW5pemVyLmJhdGNoX2RlY29kZShwcmVkcyUyQyUyMHNraXBfc3BlY2lhbF90b2tlbnMlM0RUcnVlKSUwQSUyMCUyMCUyMCUyMGRlY29kZWRfbGFiZWxzJTIwJTNEJTIwdG9rZW5pemVyLmJhdGNoX2RlY29kZShsYWJlbHMlMkMlMjBza2lwX3NwZWNpYWxfdG9rZW5zJTNEVHJ1ZSklMEElMEElMjAlMjAlMjAlMjAlMjMlMjByb3VnZUxTdW0lMjBleHBlY3RzJTIwbmV3bGluZSUyMGFmdGVyJTIwZWFjaCUyMHNlbnRlbmNlJTBBJTIwJTIwJTIwJTIwZGVjb2RlZF9wcmVkcyUyMCUzRCUyMCU1QiUyMiU1Q24lMjIuam9pbihubHRrLnNlbnRfdG9rZW5pemUocHJlZC5zdHJpcCgpKSklMjBmb3IlMjBwcmVkJTIwaW4lMjBkZWNvZGVkX3ByZWRzJTVEJTBBJTIwJTIwJTIwJTIwZGVjb2RlZF9sYWJlbHMlMjAlM0QlMjAlNUIlMjIlNUNuJTIyLmpvaW4obmx0ay5zZW50X3Rva2VuaXplKGxhYmVsLnN0cmlwKCkpKSUyMGZvciUyMGxhYmVsJTIwaW4lMjBkZWNvZGVkX2xhYmVscyU1RCUwQSUwQSUyMCUyMCUyMCUyMHJlc3VsdCUyMCUzRCUyMG1ldHJpYy5jb21wdXRlKHByZWRpY3Rpb25zJTNEZGVjb2RlZF9wcmVkcyUyQyUyMHJlZmVyZW5jZXMlM0RkZWNvZGVkX2xhYmVscyUyQyUyMHVzZV9zdGVtbWVyJTNEVHJ1ZSklMEElMjAlMjAlMjAlMjByZXR1cm4lMjByZXN1bHQlMEElMEElMjMlMjBMb2FkJTIwcHJldHJhaW5lZCUyMG1vZGVsJTIwYW5kJTIwZXZhbHVhdGUlMjBtb2RlbCUyMGFmdGVyJTIwZWFjaCUyMGVwb2NoJTBBbW9kZWwlMjAlM0QlMjBBdXRvTW9kZWxGb3JTZXEyU2VxTE0uZnJvbV9wcmV0cmFpbmVkKCUyMnQ1LXNtYWxsJTIyKSUwQWRhdGFfY29sbGF0b3IlMjAlM0QlMjBEYXRhQ29sbGF0b3JGb3JTZXEyU2VxKHRva2VuaXplciUzRHRva2VuaXplciUyQyUyMG1vZGVsJTNEbW9kZWwpJTBBJTBBdHJhaW5pbmdfYXJncyUyMCUzRCUyMFNlcTJTZXFUcmFpbmluZ0FyZ3VtZW50cyglMEElMjAlMjAlMjAlMjBvdXRwdXRfZGlyJTNEJTIyLiUyRnJlc3VsdHMlMjIlMkMlMEElMjAlMjAlMjAlMjBldmFsdWF0aW9uX3N0cmF0ZWd5JTNEJTIyZXBvY2glMjIlMkMlMEElMjAlMjAlMjAlMjBsZWFybmluZ19yYXRlJTNEMmUtNSUyQyUwQSUyMCUyMCUyMCUyMHBlcl9kZXZpY2VfdHJhaW5fYmF0Y2hfc2l6ZSUzRDE2JTJDJTBBJTIwJTIwJTIwJTIwcGVyX2RldmljZV9ldmFsX2JhdGNoX3NpemUlM0Q0JTJDJTBBJTIwJTIwJTIwJTIwd2VpZ2h0X2RlY2F5JTNEMC4wMSUyQyUwQSUyMCUyMCUyMCUyMHNhdmVfdG90YWxfbGltaXQlM0QzJTJDJTBBJTIwJTIwJTIwJTIwbnVtX3RyYWluX2Vwb2NocyUzRDIlMkMlMEElMjAlMjAlM
jAlMjBmcDE2JTNEVHJ1ZSUyQyUwQSUyMCUyMCUyMCUyMHByZWRpY3Rfd2l0aF9nZW5lcmF0ZSUzRFRydWUlMEEpJTBBJTBBdHJhaW5lciUyMCUzRCUyMFNlcTJTZXFUcmFpbmVyKCUwQSUyMCUyMCUyMCUyMG1vZGVsJTNEbW9kZWwlMkMlMEElMjAlMjAlMjAlMjBhcmdzJTNEdHJhaW5pbmdfYXJncyUyQyUwQSUyMCUyMCUyMCUyMHRyYWluX2RhdGFzZXQlM0R0b2tlbml6ZWRfYmlsbHN1bSU1QiUyMnRyYWluJTIyJTVEJTJDJTBBJTIwJTIwJTIwJTIwZXZhbF9kYXRhc2V0JTNEdG9rZW5pemVkX2JpbGxzdW0lNUIlMjJ0ZXN0JTIyJTVEJTJDJTBBJTIwJTIwJTIwJTIwdG9rZW5pemVyJTNEdG9rZW5pemVyJTJDJTBBJTIwJTIwJTIwJTIwZGF0YV9jb2xsYXRvciUzRGRhdGFfY29sbGF0b3IlMkMlMEElMjAlMjAlMjAlMjBjb21wdXRlX21ldHJpY3MlM0Rjb21wdXRlX21ldHJpY3MlMEEpJTBBJTBBdHJhaW5lci50cmFpbigp",highlighted:`import nltk
from datasets import load_dataset
import evaluate
import numpy as np
from transformers import AutoTokenizer, DataCollatorForSeq2Seq
from transformers import AutoModelForSeq2SeqLM, Seq2SeqTrainingArguments, Seq2SeqTrainer
billsum = load_dataset("billsum", split="ca_test").shuffle(seed=42).select(range(200))
billsum = billsum.train_test_split(test_size=0.2)
tokenizer = AutoTokenizer.from_pretrained("t5-small")
prefix = "summarize: "
def preprocess_function(examples):
inputs = [prefix + doc for doc in examples["text"]]
model_inputs = tokenizer(inputs, max_length=1024, truncation=True)
labels = tokenizer(text_target=examples["summary"], max_length=128, truncation=True)
model_inputs["labels"] = labels["input_ids"]
return model_inputs
tokenized_billsum = billsum.map(preprocess_function, batched=True)
nltk.download("punkt_tab", quiet=True)
metric = evaluate.load("rouge")
def compute_metrics(eval_preds):
preds, labels = eval_preds
labels = np.where(labels != -100, labels, tokenizer.pad_token_id)
decoded_preds = tokenizer.batch_decode(preds, skip_special_tokens=True)
decoded_labels = tokenizer.batch_decode(labels, skip_special_tokens=True)
decoded_preds = ["\\n".join(nltk.sent_tokenize(pred.strip())) for pred in decoded_preds]
decoded_labels = ["\\n".join(nltk.sent_tokenize(label.strip())) for label in decoded_labels]
result = metric.compute(predictions=decoded_preds, references=decoded_labels, use_stemmer=True)
return result
model = AutoModelForSeq2SeqLM.from_pretrained("t5-small")
data_collator = DataCollatorForSeq2Seq(tokenizer=tokenizer, model=model)
training_args = Seq2SeqTrainingArguments(
output_dir="./results",
evaluation_strategy="epoch",
learning_rate=2e-5,
per_device_train_batch_size=16,
per_device_eval_batch_size=4,
weight_decay=0.01,
save_total_limit=3,
num_train_epochs=2,
fp16=True,
predict_with_generate=True
)
trainer = Seq2SeqTrainer(
model=model,
args=training_args,
train_dataset=tokenized_billsum["train"],
eval_dataset=tokenized_billsum["test"],
tokenizer=tokenizer,
data_collator=data_collator,
compute_metrics=compute_metrics
)
trainer.train()`,wrap:!1}}),h=new ns({props:{source:"https://github.com/huggingface/evaluate/blob/main/docs/source/transformers_integrations.mdx"}}),{/* c: create DOM nodes fresh (T = element factory, t = spacer text node) */c(){r=T("meta"),V=t(),G=T("p"),f=t(),j(p.$$.fragment),I=t(),c=T("p"),c.textContent=Q,X=t(),j(M.$$.fragment),R=t(),j(i.$$.fragment),_=t(),o=T("p"),o.innerHTML=A,C=t(),j(d.$$.fragment),E=t(),j(m.$$.fragment),F=t(),y=T("p"),y.innerHTML=$,g=t(),j(J.$$.fragment),z=t(),u=T("p"),u.innerHTML=L,Y=t(),j(h.$$.fragment),N=t(),B=T("p"),this.h()},/* l: claim existing DOM during hydration — presumably SSR output; the
   "data-svelte-h" hash compares (v(c)!=="svelte-…") skip re-setting
   textContent/innerHTML when the server-rendered markup already matches */l(s){const l=as("svelte-u9bgzb",document.head);r=b(l,"META",{name:!0,content:!0}),l.forEach(e),V=n(s),G=b(s,"P",{}),K(G).forEach(e),f=n(s),U(p.$$.fragment,s),I=n(s),c=b(s,"P",{"data-svelte-h":!0}),v(c)!=="svelte-hhpzuu"&&(c.textContent=Q),X=n(s),U(M.$$.fragment,s),R=n(s),U(i.$$.fragment,s),_=n(s),o=b(s,"P",{"data-svelte-h":!0}),v(o)!=="svelte-d1oj50"&&(o.innerHTML=A),C=n(s),U(d.$$.fragment,s),E=n(s),U(m.$$.fragment,s),F=n(s),y=b(s,"P",{"data-svelte-h":!0}),v(y)!=="svelte-1xbwtuk"&&(y.innerHTML=$),g=n(s),U(J.$$.fragment,s),z=n(s),u=b(s,"P",{"data-svelte-h":!0}),v(u)!=="svelte-1x0y3m4"&&(u.innerHTML=L),Y=n(s),U(h.$$.fragment,s),N=n(s),B=b(s,"P",{}),K(B).forEach(e),this.h()},/* h: set attributes — writes the page metadata (ps, defined below) into
   <meta name="hf:doc:metadata"> */h(){D(r,"name","hf:doc:metadata"),D(r,"content",ps)},/* m: mount — meta tag into document.head, everything else into target s */m(s,l){ts(document.head,r),a(s,V,l),a(s,G,l),a(s,f,l),w(p,s,l),a(s,I,l),a(s,c,l),a(s,X,l),w(M,s,l),a(s,R,l),w(i,s,l),a(s,_,l),a(s,o,l),a(s,C,l),w(d,s,l),a(s,E,l),w(m,s,l),a(s,F,l),a(s,y,l),a(s,g,l),w(J,s,l),a(s,z,l),a(s,u,l),a(s,Y,l),w(h,s,l),a(s,N,l),a(s,B,l),H=!0},/* p: update is a noop (O) — the page has no reactive props */p:O,/* i / o: transition child fragments in (Z) / out (W); H guards re-entry */i(s){H||(Z(p.$$.fragment,s),Z(M.$$.fragment,s),Z(i.$$.fragment,s),Z(d.$$.fragment,s),Z(m.$$.fragment,s),Z(J.$$.fragment,s),Z(h.$$.fragment,s),H=!0)},o(s){W(p.$$.fragment,s),W(M.$$.fragment,s),W(i.$$.fragment,s),W(d.$$.fragment,s),W(m.$$.fragment,s),W(J.$$.fragment,s),W(h.$$.fragment,s),H=!1},/* d: destroy — detach nodes (e) and tear down child components (k) */d(s){s&&(e(V),e(G),e(f),e(I),e(c),e(X),e(R),e(_),e(o),e(C),e(E),e(F),e(y),e(g),e(z),e(u),e(Y),e(N),e(B)),e(r),k(p,s),k(M,s),k(i,s),k(d,s),k(m,s),k(J,s),k(h,s)}}}
// ps — serialized page table-of-contents written into the hf:doc:metadata
// <meta> tag by h() above (string literal spans a raw line break as emitted).
const ps='{"title":"🤗 
Transformers","local":"-transformers","sections":[{"title":"Trainer","local":"trainer","sections":[],"depth":2},{"title":"Seq2SeqTrainer","local":"seq2seqtrainer","sections":[],"depth":2}],"depth":1}';
// cs — component instance factory: registers a callback via ss (presumably an
// onMount-style scheduler hook — confirm against scheduler chunk) that reads
// the "fw" query parameter; the value is unused here. No reactive state ([]).
function cs(q){return ss(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}
// ms — the exported SvelteKit page component, wiring instance (cs) and
// fragment (rs) factories into the SvelteComponent base (ls/es).
class ms extends ls{constructor(r){super(),es(this,r,cs,rs,P,{})}}
export{ms as component};