#!/bin/sh
#
# https://claude.ai/share/b16220a4-da07-4309-9658-93d28a7c407a
#
if [ -z "$1" ]; then
    echo "Usage: $0 <github-issue-url>" >&2
    echo "Example: $0 https://github.com/owner/repo/issues/123" >&2
    exit 1
fi

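The gist stops at the usage check; a minimal sketch of how the script might continue, assuming the goal is to split the issue URL into owner, repo, and number with plain POSIX shell (all variable names below are illustrative, not from the original):

# Possible continuation (not part of the original gist): validate the URL shape
# and pull owner/repo/issue-number apart with POSIX parameter expansion.
url="$1"
case "$url" in
    https://github.com/*/*/issues/*) ;;
    *) echo "Not a GitHub issue URL: $url" >&2; exit 1 ;;
esac
rest="${url#https://github.com/}"   # owner/repo/issues/123
owner="${rest%%/*}"; rest="${rest#*/}"
repo="${rest%%/*}";  rest="${rest#*/}"
number="${rest#issues/}"
echo "owner=$owner repo=$repo issue=$number"
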
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <net/if.h>
#include <linux/if_tun.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <sys/stat.h>

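Only the includes survive in this gist, but <linux/if_tun.h> suggests a TUN device program. A minimal sketch of the usual allocation step under that assumption (the function name is illustrative, and <fcntl.h> would also be needed for open()):

#include <fcntl.h>   /* open() -- not among the includes above */

/* Open /dev/net/tun and attach a TUN interface; returns the fd or -1. */
static int tun_alloc(char *dev, size_t dev_len)
{
    struct ifreq ifr;
    int fd;

    fd = open("/dev/net/tun", O_RDWR);
    if (fd < 0)
        return -1;

    memset(&ifr, 0, sizeof(ifr));
    ifr.ifr_flags = IFF_TUN | IFF_NO_PI;   /* raw IP packets, no extra header */
    if (dev && *dev)
        strncpy(ifr.ifr_name, dev, IFNAMSIZ - 1);

    if (ioctl(fd, TUNSETIFF, (void *)&ifr) < 0) {
        close(fd);
        return -1;
    }

    if (dev)
        snprintf(dev, dev_len, "%s", ifr.ifr_name);  /* kernel-chosen name, e.g. tun0 */
    return fd;
}
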
ubuntu@mi25:~/llama.cpp (master) $ set -x
./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-F16.gguf -f ~/polano.txt
printf '\n\n'
./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-UD-Q8_K_XL.gguf -f ~/polano.txt
set +x
++ parse_git_branch
++ git branch --no-color
++ sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
+ ./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m /home/ubuntu/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-F16.gguf -f /home/ubuntu/polano.txt
ggml_cuda_init: GGML_CUDA_FORCE_MMQ: no

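The same F16-versus-quant comparison can be wrapped in a loop; a sketch, assuming llama-perplexity prints its usual "Final estimate: PPL" summary line:

#!/bin/sh
# Sketch: run llama-perplexity over several GGUF files on the same text and
# keep just the summary line from each run.
set -eu
for model in \
    ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-F16.gguf \
    ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-UD-Q8_K_XL.gguf
do
    echo "== $model"
    ./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer \
        -m "$model" -f ~/polano.txt 2>&1 | grep 'Final estimate' || true
done
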
ubuntu@mi25:~/llama.cpp (master) $ ./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-F16.gguf -f ~/polano.txt
printf '\n'
./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m ~/.cache/llama.cpp/unsloth_Qwen3-30B-A3B-Instruct-2507-GGUF_Qwen3-30B-A3B-Instruct-2507-UD-Q8_K_XL.gguf -f ~/polano.txt
printf '\n'
./build/bin/llama-perplexity --n-gpu-layers 100 --split-mode layer -m ~/.cache/llama.cpp/unsloth_gemma-3-27b-it-GGUF_gemma-3-27b-it-UD-Q8_K_XL.gguf -f ~/polano.txt
ggml_cuda_init: GGML_CUDA_FORCE_MMQ: no
ggml_cuda_init: GGML_CUDA_FORCE_CUBLAS: no
ggml_cuda_init: found 4 ROCm devices:
  Device 0: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64
  Device 1: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64

ubuntu@mi25:~/llama.cpp (master) $ ./build/bin/llama-bench -p 0 -n 128,256,512 -m ~/.cache/llama.cpp/unsloth_gpt-oss-20b-GGUF_gpt-oss-20b-F16.gguf -m ~/.cache/llama.cpp/unsloth_Qwen3-30B-A3B-Instruct-2507-GGUF_Qwen3-30B-A3B-Instruct-2507-UD-Q8_K_XL.gguf -m ~/.cache/llama.cpp/unsloth_gemma-3-27b-it-GGUF_gemma-3-27b-it-UD-Q8_K_XL.gguf
ggml_cuda_init: GGML_CUDA_FORCE_MMQ: no
ggml_cuda_init: GGML_CUDA_FORCE_CUBLAS: no
ggml_cuda_init: found 4 ROCm devices:
  Device 0: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64
  Device 1: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64
  Device 2: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64
  Device 3: Radeon Instinct MI25, gfx900:xnack- (0x900), VMM: no, Wave Size: 64
| model | size | params | backend | ngl | test | t/s |
| ------------------------------ | ---------: | ---------: | ---------- | --: | --------------: | -------------------: |

set -g mouse on
set -g terminal-overrides 'xterm*:smcup@:rmcup@'
set -g set-titles on
set -g set-titles-string '#H'
set-window-option -g mode-keys vi

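A common companion to mode-keys vi, not part of the original snippet and assuming tmux 2.4 or newer for the copy-mode-vi key table:

# Assumed additions for vi-style selection and yank in copy mode
bind-key -T copy-mode-vi v send-keys -X begin-selection
bind-key -T copy-mode-vi y send-keys -X copy-selection-and-cancel
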
ubuntu@t120h-k80:~/llama.cpp (master)$ ./build/bin/llama-bench -p 0 -n 128,256,512 \
> -m ~/.cache/llama.cpp/unsloth_Qwen3-32B-GGUF_Qwen3-32B-Q8_0.gguf \
> -m ~/.cache/llama.cpp/unsloth_Qwen3-30B-A3B-GGUF_Qwen3-30B-A3B-Q8_0.gguf \
> -m ~/.cache/llama.cpp/mmns_Qwen3-32B-F16.gguf \
> -m ~/.cache/llama.cpp/mmns_Qwen3-30B-A3B-F16.gguf \
> -m ~/.cache/llama.cpp/unsloth_DeepSeek-R1-Distill-Llama-70B-GGUF_DeepSeek-R1-Distill-Llama-70B-Q4_K_M.gguf
ggml_cuda_init: GGML_CUDA_FORCE_MMQ: no
ggml_cuda_init: GGML_CUDA_FORCE_CUBLAS: no
ggml_cuda_init: found 8 CUDA devices:
  Device 0: Tesla K80, compute capability 3.7, VMM: yes

ubuntu@t120h-p100-ubuntu20:~/llama.cpp$ ./build/bin/llama-cli -hf unsloth/DeepSeek-R1-Distill-Qwen-32B-GGUF:Q4_K_M -no-cnv --prompt "自己紹介してください" -ngl 65 --split-mode row
ggml_cuda_init: GGML_CUDA_FORCE_MMQ: no
ggml_cuda_init: GGML_CUDA_FORCE_CUBLAS: no
ggml_cuda_init: found 4 CUDA devices:
  Device 0: Tesla K80, compute capability 3.7, VMM: yes
  Device 1: Tesla K80, compute capability 3.7, VMM: yes
  Device 2: Tesla K80, compute capability 3.7, VMM: yes
  Device 3: Tesla K80, compute capability 3.7, VMM: yes
build: 4997 (d3f1f0ac) with cc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0 for x86_64-linux-gnu
main: llama backend init

#!/bin/sh
API_KEY="<YOUR API KEY HERE>"
set -eu
url="${1}"
authorized_url="${url}&token=${API_KEY}"
wget -P models/Stable-diffusion --content-disposition "${authorized_url}"

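A slightly more defensive variant, offered only as a sketch: take the token from the environment instead of hard-coding it, check the argument, and pick ? or & depending on whether the URL already carries a query string (CIVITAI_API_KEY is an assumed variable name):

#!/bin/sh
# Sketch: same download, but with the API key read from the environment
# and a usage check instead of a hard-coded key.
set -eu
if [ -z "${1:-}" ]; then
    echo "Usage: $0 <model-download-url>" >&2
    exit 1
fi
: "${CIVITAI_API_KEY:?set CIVITAI_API_KEY in the environment}"
url="$1"
case "$url" in
    *\?*) sep='&' ;;   # URL already has a query string
    *)    sep='?' ;;
esac
wget -P models/Stable-diffusion --content-disposition "${url}${sep}token=${CIVITAI_API_KEY}"
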
# 0 "/home/miminashi/src/openwrt/build_dir/target-arm_cortex-a9+vfpv3-d16_musl_eabi/linux-mvebu_cortexa9/linux-6.6.60/arch/arm/boot/dts/marvell/armada-385-fortinet-fg-50e.dts"
# 0 "<built-in>"
# 0 "<command-line>"
# 1 "/home/miminashi/src/openwrt/build_dir/target-arm_cortex-a9+vfpv3-d16_musl_eabi/linux-mvebu_cortexa9/linux-6.6.60/arch/arm/boot/dts/marvell/armada-385-fortinet-fg-50e.dts"
# 1 "/home/miminashi/src/openwrt/build_dir/target-arm_cortex-a9+vfpv3-d16_musl_eabi/linux-mvebu_cortexa9/linux-6.6.60/arch/arm/boot/dts/marvell/armada-385-fortinet-fg-5xe.dtsi" 1
# 1 "/home/miminashi/src/openwrt/build_dir/target-arm_cortex-a9+vfpv3-d16_musl_eabi/linux-mvebu_cortexa9/linux-6.6.60/arch/arm/boot/dts/marvell/armada-385-fortinet-fg-xxe.dtsi" 1

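These linemarkers are what the kernel's DTS preprocessing step leaves behind (cpp runs over the .dts before dtc compiles it). A sketch of reproducing that step by hand from inside the linux-6.6.60 tree, using the include path the kernel build normally passes:

# Sketch: preprocess the board .dts the way the kernel build does, then feed
# the result to dtc on stdin. Run from the top of the kernel source tree.
cpp -nostdinc -I scripts/dtc/include-prefixes -undef -D__DTS__ \
    -x assembler-with-cpp \
    arch/arm/boot/dts/marvell/armada-385-fortinet-fg-50e.dts \
  | dtc -I dts -O dtb -o armada-385-fortinet-fg-50e.dtb -
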