diff options
| author | Gus Power <gus@infinitesidequests.com> | 2025-05-25 09:15:54 +0100 |
|---|---|---|
| committer | Gus Power <gus@infinitesidequests.com> | 2025-05-25 09:15:54 +0100 |
| commit | 60e78e2057e2197bb2b13d162c7cab26b0374dcb (patch) |
| tree | 908cf0558bd239b9d6a5d5c3f7ecb47243b56e0f |
a few snippets
| -rw-r--r-- | 0001_whisper_transcribe.py | 9 |
| -rw-r--r-- | 0002_rocm_check_gpu.py | 6 |
| -rw-r--r-- | README.md | 5 |
3 files changed, 20 insertions, 0 deletions
diff --git a/0001_whisper_transcribe.py b/0001_whisper_transcribe.py
new file mode 100644
index 0000000..44e8b3f
--- /dev/null
+++ b/0001_whisper_transcribe.py
@@ -0,0 +1,9 @@
+import whisper
+import sys
+
+input_file = sys.argv[1]
+model = whisper.load_model("small")  # You can also try "base" or "medium"
+result = model.transcribe(input_file, verbose=True)
+print(input_file)
+print(result["text"])
+
diff --git a/0002_rocm_check_gpu.py b/0002_rocm_check_gpu.py
new file mode 100644
index 0000000..8c92adb
--- /dev/null
+++ b/0002_rocm_check_gpu.py
@@ -0,0 +1,6 @@
+import torch
+print(torch.cuda.is_available())  # True when the AMD GPU works: ROCm builds reuse the torch.cuda API
+print(torch.backends.mps.is_available())  # MPS is Apple-only, so False for AMD
+print(torch.version.hip)  # Should show ROCm version
+print(torch.version.cuda)  # Should be None
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..facc364
--- /dev/null
+++ b/README.md
@@ -0,0 +1,5 @@
+# Snippets
+
+A collection of code-y bits n bobs for the bored and curious
+
+
