-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexploring_the_hugging_face_platform.py
More file actions
38 lines (25 loc) · 1.09 KB
/
exploring_the_hugging_face_platform.py
File metadata and controls
38 lines (25 loc) · 1.09 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
# -*- coding: utf-8 -*-
"""Exploring the Hugging Face Platform.ipynb
Automatically generated by Colab.
Original file is located at
    https://colab.research.google.com/drive/1fsc-c8TTjqQvV2txQDrSJ5bp8p2suPIZ

**Using the Access Token**
"""
import os

from huggingface_hub import login

# SECURITY FIX: the original file committed a live Hugging Face access token
# in plain text. That token is compromised and must be revoked at
# https://huggingface.co/settings/tokens. Read the token from the
# environment instead of hardcoding it in source.
_hf_token = os.environ.get("HF_TOKEN")
if not _hf_token:
    raise RuntimeError(
        "Set the HF_TOKEN environment variable to your Hugging Face access token."
    )
login(token=_hf_token)
"""## **Testing the Setup**"""
from transformers import AutoModelForMaskedLM, AutoTokenizer, pipeline
# Load the pre-trained model and tokenizer from Hugging Face
model_name = "bert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForMaskedLM.from_pretrained(model_name)
# Create a pipeline for masked language modeling
nlp_pipeline = pipeline("fill-mask", model=model, tokenizer=tokenizer)
# Test the pipeline with a simple input
test_sentence = "The quick brown fox jumps over the [MASK] dog."
result = nlp_pipeline(test_sentence)
print(result)
test_sentence1 = "I am coming to your [MASK] to eat along with you."
result1 = nlp_pipeline(test_sentence1)
print(result1)