quaint-midnight-92440
05/24/2025, 6:26 AMfreezing-airport-6809
quaint-midnight-92440
05/25/2025, 3:04 AMpowerful-horse-58724
05/27/2025, 12:19 AMquaint-midnight-92440
05/27/2025, 2:00 AMquaint-midnight-92440
05/29/2025, 3:23 AMpowerful-horse-58724
05/29/2025, 6:26 AMquaint-midnight-92440
05/29/2025, 7:28 AMpowerful-horse-58724
@task(
    container_image=container_image,
    requests=Resources(cpu="2", mem="2Gi"),
    secret_requests=[Secret(group=None, key="hf_token")],
)
def upload_model_to_hub(model: torch.nn.Module, repo_name: str) -> str:
    """Serialize a PyTorch model's state dict and upload it to the Hugging Face Hub.

    Args:
        model: The trained model whose ``state_dict()`` is uploaded.
        repo_name: Target Hub repo id (e.g. ``"user/faster-rcnn"``); created
            if it does not already exist.

    Returns:
        A human-readable message containing the Hub URL of the repository.
    """
    from huggingface_hub import HfApi

    ctx = current_context()
    # BUG FIX: save into the Flyte task's working directory rather than the
    # process CWD, so concurrently scheduled tasks on the same host cannot
    # clobber each other's "best_model.pth" (matches download_model's pattern).
    model_path = str(Path(ctx.working_directory) / "best_model.pth")
    torch.save(model.state_dict(), model_path)

    # Prefer a locally exported token; fall back to the Flyte secret
    # declared in secret_requests above.
    hf_token = os.getenv("HF_TOKEN")
    if hf_token is None:
        hf_token = ctx.secrets.get(key="hf_token")
        print("Using Hugging Face token from Flyte secrets.")
    else:
        print("Using Hugging Face token from environment variable.")

    # Create the repo if missing (exist_ok makes this idempotent), then push
    # the serialized weights under the conventional file name.
    api = HfApi()
    api.create_repo(repo_name, token=hf_token, exist_ok=True)
    api.upload_file(
        path_or_fileobj=model_path,
        path_in_repo="pytorch_model.bin",
        repo_id=repo_name,
        commit_message="Upload Faster R-CNN model",
        token=hf_token,
    )
    # BUG FIX: the original returned the URL wrapped in Slack-style angle
    # brackets ("<https://...>"), an artifact of the chat export; return a
    # plain, clickable URL instead.
    return f"Model uploaded to Hugging Face Hub: https://huggingface.co/{repo_name}"
@task(
    container_image=container_image,
    cache=True,
    cache_version="1",
    requests=Resources(cpu="2", mem="2Gi"),
)
def download_model(model_name: str) -> FlyteDirectory:
    """Download a Hugging Face sequence-classification model and its tokenizer.

    Both artifacts are written into a ``saved_model`` directory under the
    Flyte task's working directory, which is returned as a FlyteDirectory so
    downstream tasks can consume the files.

    Args:
        model_name: Hugging Face Hub model id to fetch.

    Returns:
        FlyteDirectory pointing at the directory holding model + tokenizer files.
    """
    from transformers import AutoModelForSequenceClassification, AutoTokenizer

    # Stage everything inside the task sandbox so Flyte can snapshot it.
    out_dir = Path(current_context().working_directory) / "saved_model"
    out_dir.mkdir(parents=True, exist_ok=True)

    downloaded_model = AutoModelForSequenceClassification.from_pretrained(
        model_name,
        device_map="cpu",
        torch_dtype="auto",
        trust_remote_code=True,
    )
    downloaded_tokenizer = AutoTokenizer.from_pretrained(model_name)

    # Persist model weights/config first, then tokenizer files, into one dir.
    for artifact in (downloaded_model, downloaded_tokenizer):
        artifact.save_pretrained(out_dir)

    return FlyteDirectory(out_dir)
05/30/2025, 10:31 AM