{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "f16f8b4c-4df6-4e05-bcaf-cc36bd6264ca",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Fri May 7 15:36:34 2021 \n",
"+-----------------------------------------------------------------------------+\n",
"| NVIDIA-SMI 450.51.06 Driver Version: 450.51.06 CUDA Version: 11.0 |\n",
"|-------------------------------+----------------------+----------------------+\n",
"| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n",
"| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n",
"| | | MIG M. |\n",
"|===============================+======================+======================|\n",
"| 0 Tesla K80 Off | 00000000:00:04.0 Off | 0 |\n",
"| N/A 36C P8 26W / 149W | 0MiB / 11441MiB | 0% Default |\n",
"| | | N/A |\n",
"+-------------------------------+----------------------+----------------------+\n",
" \n",
"+-----------------------------------------------------------------------------+\n",
"| Processes: |\n",
"| GPU GI CI PID Type Process name GPU Memory |\n",
"| ID ID Usage |\n",
"|=============================================================================|\n",
"| No running processes found |\n",
"+-----------------------------------------------------------------------------+\n"
]
}
],
"source": [
"! nvidia-smi"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3fe93a17-fa81-4872-a201-3561c91a3563",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<numba.cuda.cudadrv.devices._DeviceContextManager at 0x7fd370af1070>"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import numba.cuda\n",
"numba.cuda.gpus[0]"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f0f8d2d9-e1b8-4d7e-8ec9-f965b6c411c0",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<weakproxy at 0x7fd372158540 to Device at 0x7fd372181790>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"numba.cuda.select_device(0)"
]
},
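{
"cell_type": "markdown",
"id": "cuda-detect-note",
"metadata": {},
"source": [
"As an optional sanity check, `numba.cuda.detect()` should print the CUDA devices Numba can see and return whether at least one supported GPU was found; a minimal sketch:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cuda-detect-code",
"metadata": {},
"outputs": [],
"source": [
"import numba.cuda\n",
"\n",
"# list the CUDA devices Numba can see; returns True if a supported GPU is present\n",
"numba.cuda.detect()"
]
},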
{
"cell_type": "code",
"execution_count": 7,
"id": "0f5d7a64-fff9-4afc-a70d-dabb5fd03a1c",
"metadata": {},
"outputs": [],
"source": [
"@numba.cuda.jit\n",
"def matmul(A, B, C):\n",
" \"\"\"Perform square matrix multiplication of C = A * B\n",
" \"\"\"\n",
" i, j = numba.cuda.grid(2)\n",
" if i < C.shape[0] and j < C.shape[1]:\n",
" tmp = 0.\n",
" for k in range(A.shape[1]):\n",
" tmp += A[i, k] * B[k, j]\n",
" C[i, j] = tmp\n"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "2e4c9f63-7ff5-4ce2-9054-2372778d08c1",
"metadata": {},
"outputs": [],
"source": [
"import numpy\n",
"\n",
"A = numpy.array([[1, 2],[3, 4]])\n",
"B = numpy.array([[1, 2],[3, 4]])\n",
"C = numpy.array([[1, 2],[3, 4]])\n",
"\n",
"matmul[4, 4](A, B, C)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "39a4d66c-d6c9-49cf-9d09-12b595cc21a6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 7, 2],\n",
" [15, 4]])"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"C"
]
},
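{
"cell_type": "markdown",
"id": "matmul-2d-launch-note",
"metadata": {},
"source": [
"Only the first column of `C` came out right because `[4, 4]` is a one-dimensional launch configuration, so the second grid dimension returned by `numba.cuda.grid(2)` never exceeds 0. Below is a minimal sketch of a full 2D launch: it reuses the `matmul` kernel and the `A`, `B`, `C` arrays defined above and copies the inputs to the device explicitly with `numba.cuda.to_device`; the `threadsperblock`/`blockspergrid` names and the (16, 16) block size are illustrative choices, not anything prescribed by Numba."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "matmul-2d-launch-code",
"metadata": {},
"outputs": [],
"source": [
"import math\n",
"\n",
"# explicit host-to-device copies instead of relying on implicit transfer\n",
"d_A = numba.cuda.to_device(A)\n",
"d_B = numba.cuda.to_device(B)\n",
"d_C = numba.cuda.device_array_like(C)\n",
"\n",
"# 2D launch: enough (16, 16) thread blocks to cover every element of C\n",
"threadsperblock = (16, 16)\n",
"blockspergrid = (math.ceil(C.shape[0] / threadsperblock[0]),\n",
"                 math.ceil(C.shape[1] / threadsperblock[1]))\n",
"matmul[blockspergrid, threadsperblock](d_A, d_B, d_C)\n",
"\n",
"# copy the result back; for these inputs this should give [[ 7, 10], [15, 22]]\n",
"d_C.copy_to_host()"
]
},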
{
"cell_type": "code",
"execution_count": null,
"id": "db7f9aba-14ff-4630-97f1-a8c452436da3",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [conda env:gpu]",
"language": "python",
"name": "conda-env-gpu-py"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
}
},
"nbformat": 4,
"nbformat_minor": 5
}