"""Run a quick text-generation smoke test through a DeepSpeed-MII pipeline.

Loads a local Qwen3-0.6B checkpoint and generates completions for two
prompts. Intended to be executed directly as a script.
"""
import os

import mii

# Single source of truth for the rendezvous port: torch.distributed reads
# MASTER_PORT from the environment (as a string), while MII takes
# torch_dist_port as an int. Keeping them derived from one constant
# prevents the two from silently drifting apart.
DIST_PORT = 29505

# Local path to the model checkpoint.
MODEL_PATH = "/data/home/zengxiangxi/hf_model/qwen3-0.6b"


def main() -> None:
    """Build the MII pipeline and print generations for two sample prompts."""
    os.environ["MASTER_ADDR"] = "localhost"
    os.environ["MASTER_PORT"] = str(DIST_PORT)

    model_config = {
        "torch_dist_port": DIST_PORT,
        # Other configs if needed...
    }
    # NOTE(review): MASTER_PORT and torch_dist_port are set to the same value
    # here; if both sides try to bind the port this could collide — confirm
    # against the MII docs whether torch_dist_port alone is sufficient.
    pipe = mii.pipeline(MODEL_PATH, model_config=model_config)

    response = pipe(["DeepSpeed is", "Seattle is"], max_new_tokens=64)
    print(response)


if __name__ == "__main__":
    main()