[project]
name = "mobilellm-r1-950m"
version = "0.1.0"
description = "mlx_lm_for_mobile_llm_r1"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "mlx>=0.29.1",
    "mlx-lm>=0.27.1",
    "safetensors>=0.6.2",
    "transformers>=4.56.1",
]

[dependency-groups]
dev = [
    "torch>=2.8.0",
]

[tool.hatch.build.targets.wheel]
packages = ["custom_mlx_lm"]

[project.scripts]
mobilellm-infer = "custom_mlx_lm.inference_mlx_lm:main"
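
# Assumed addition: the [tool.hatch.build.targets.wheel] table and the
# mobilellm-infer console script above imply a Hatch-based build, but no
# [build-system] table is declared, so the package (and its script) would
# not be installable. A minimal sketch assuming hatchling is the intended
# backend; adjust requires/build-backend if the project uses another one.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"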