A Comprehensive Implementation Guide to ModelScope for Model Search, Inference, Fine-Tuning, Evaluation, and Export


print("\n📊 MODEL EVALUATION\n")


# Run the Trainer's built-in evaluation loop (trainer is defined earlier
# in the full script) and report the numeric metrics it returns.
eval_results = trainer.evaluate()
print("  Evaluation Results:")
for metric_name, metric_value in eval_results.items():
    # Only float-valued entries are shown, aligned and rounded to 4 dp.
    if isinstance(metric_value, float):
        print(f"    {metric_name:<25}: {metric_value:.4f}")


from sklearn.metrics import classification_report, confusion_matrix


# Per-example predictions for a detailed precision/recall/F1 breakdown.
preds_output = trainer.predict(eval_ds)
labels = preds_output.label_ids
# Logits -> class ids: argmax over the last (class) dimension.
preds = preds_output.predictions.argmax(axis=-1)


print("\n  Classification Report:")
# NOTE(review): assumes label id 0 == NEGATIVE, 1 == POSITIVE — matches
# the dataset mapping used at fine-tuning time.
report = classification_report(labels, preds, target_names=["NEGATIVE", "POSITIVE"])
print(report)


# Render the 2x2 confusion matrix as an annotated heat-map and save it.
cm = confusion_matrix(labels, preds)
class_names = ["NEGATIVE", "POSITIVE"]
tick_positions = [0, 1]

fig, ax = plt.subplots(figsize=(5, 4))
im = ax.imshow(cm, cmap="Blues")
ax.set_xticks(tick_positions)
ax.set_yticks(tick_positions)
ax.set_xticklabels(class_names)
ax.set_yticklabels(class_names)
ax.set_xlabel("Predicted")
ax.set_ylabel("Actual")
ax.set_title("Confusion Matrix — Fine-Tuned DistilBERT")

# Annotate every cell; switch text colour at half the max count so the
# number stays readable on both light and dark cells.
threshold = cm.max() / 2
for (row, col), count in np.ndenumerate(cm):
    ax.text(col, row, str(count), ha="center", va="center",
            color="white" if count > threshold else "black", fontsize=18)

plt.colorbar(im)
plt.tight_layout()
plt.savefig("confusion_matrix.png", dpi=150)
plt.show()
print("  ✅ Saved confusion_matrix.png")


print("\n── Testing Fine-Tuned Model on New Inputs ──")
# Wrap the freshly fine-tuned weights in a standard HF inference pipeline
# so we can score raw strings directly.
ft_pipeline = hf_pipeline(
    "sentiment-analysis",
    model=trainer.model,
    tokenizer=tokenizer,
    device=DEVICE,
)


# Unseen reviews: clearly positive, clearly negative, and mixed tone.
new_reviews = [
    "An absolutely breathtaking masterpiece with brilliant performances!",
    "Waste of two hours. Terrible script and wooden acting.",
    "Decent popcorn movie but nothing special. Had some fun moments.",
]


for text in new_reviews:
    prediction = ft_pipeline(text)[0]
    marker = "🟢" if prediction["label"] == "POSITIVE" else "🔴"
    print(f'  {marker} {prediction["label"]} ({prediction["score"]:.4f}): "{text}"')




print("\n💾 EXPORTING THE FINE-TUNED MODEL\n")


# Persist weights/config plus the tokenizer files side by side so the
# directory is directly loadable with from_pretrained.
save_path = "./ms_finetuned_model/final"
trainer.save_model(save_path)
tokenizer.save_pretrained(save_path)
print("  ✅ Model saved to: " + save_path)
print("     Files:", os.listdir(save_path))


print("\n── ONNX Export ──")
try:
    # optimum is an optional dependency; import inside the try so a
    # missing package is handled the same way as an export failure.
    from optimum.exporters.onnx import main_export

    onnx_dir = "./ms_finetuned_model/onnx"
    main_export(save_path, output=onnx_dir, task="text-classification")
    print(f"  ✅ ONNX model exported to: {onnx_dir}")
    print(f"     Files: {os.listdir(onnx_dir)}")
except Exception as exc:
    # Best-effort step: report the reason and keep the tutorial running.
    print(f"  ⚠️  ONNX export skipped: {exc}")


# Pushing to the ModelScope Hub requires a personal access token, so this
# step is documented for the reader rather than executed.
hub_upload_help = """
── Upload to ModelScope Hub (manual step) ──


 1. Get a token from https://modelscope.cn/my/myaccesstoken
 2. Run:


    from modelscope.hub.api import HubApi
    api = HubApi()
    api.login('YOUR_TOKEN')
    api.push_model(
        model_id='your-username/my-finetuned-distilbert',
        model_dir="./ms_finetuned_model/final",
    )
"""
print(hub_upload_help)


# Closing banner summarizing everything covered in the tutorial.
completion_banner = """
╔══════════════════════════════════════════════════════════════════╗
║                   🎉  TUTORIAL COMPLETE!  🎉                    ║
╠══════════════════════════════════════════════════════════════════╣
║  ✓ ModelScope Hub — search, browse & download models            ║
║  ✓ MsDataset — load datasets from the ModelScope ecosystem      ║
║  ✓ NLP pipelines — sentiment, NER, zero-shot, generation, mask  ║
║  ✓ CV pipelines — image classification, object detection, viz   ║
║  ✓ HuggingFace interop — snapshot_download + Transformers       ║
║  ✓ Fine-tuning — DistilBERT on IMDB with Trainer API            ║
║  ✓ Evaluation — accuracy, F1, confusion matrix                  ║
║  ✓ Export — local save, ONNX, Hub upload                        ║
╚══════════════════════════════════════════════════════════════════╝
"""
print(completion_banner)



Source link

  • Related Posts

    An Implementation Guide to Building a DuckDB-Python Analytics Pipeline with SQL, DataFrames, Parquet, UDFs, and Performance Profiling

    In this tutorial, we build a comprehensive, hands-on understanding of DuckDB-Python by working through its features directly in code on Colab. We start with the fundamentals of connection management and…

    MiniMax Releases MMX-CLI: A Command-Line Interface That Gives AI Agents Native Access to Image, Video, Speech, Music, Vision, and Search

MiniMax, the AI research company behind the MiniMax omni-modal model stack, has released MMX-CLI — a Node.js-based command-line interface that exposes the MiniMax AI platform’s full suite of generative capabilities, both…

    Leave a Reply

    Your email address will not be published. Required fields are marked *