diff --git a/Projects_2025/team7/PPT.pptx b/Projects_2025/team7/PPT.pptx
new file mode 100644
index 00000000..ec9decf8
Binary files /dev/null and b/Projects_2025/team7/PPT.pptx differ
diff --git a/Projects_2025/team7/Poster.pptx b/Projects_2025/team7/Poster.pptx
new file mode 100644
index 00000000..bc9895e7
Binary files /dev/null and b/Projects_2025/team7/Poster.pptx differ
diff --git a/Projects_2025/team7/llama.android/.DS_Store b/Projects_2025/team7/llama.android/.DS_Store
new file mode 100644
index 00000000..c401f723
Binary files /dev/null and b/Projects_2025/team7/llama.android/.DS_Store differ
diff --git a/Projects_2025/team7/llama.android/.gitignore b/Projects_2025/team7/llama.android/.gitignore
new file mode 100644
index 00000000..347e252e
--- /dev/null
+++ b/Projects_2025/team7/llama.android/.gitignore
@@ -0,0 +1,33 @@
+# Gradle files
+.gradle/
+build/
+
+# Local configuration file (sdk path, etc)
+local.properties
+
+# Log/OS Files
+*.log
+
+# Android Studio generated files and folders
+captures/
+.externalNativeBuild/
+.cxx/
+*.apk
+output.json
+
+# IntelliJ
+*.iml
+.idea/
+misc.xml
+deploymentTargetDropDown.xml
+render.experimental.xml
+
+# Keystore files
+*.jks
+*.keystore
+
+# Google Services (e.g. APIs or Firebase)
+google-services.json
+
+# Android Profiling
+*.hprof
diff --git a/Projects_2025/team7/llama.android/README.md b/Projects_2025/team7/llama.android/README.md
new file mode 100644
index 00000000..e69de29b
diff --git a/Projects_2025/team7/llama.android/VOSK_SETUP.md b/Projects_2025/team7/llama.android/VOSK_SETUP.md
new file mode 100644
index 00000000..8ca1dbab
--- /dev/null
+++ b/Projects_2025/team7/llama.android/VOSK_SETUP.md
@@ -0,0 +1,99 @@
+# Vosk 离线语音识别设置指南
+
+本应用已修改为使用 Vosk 进行离线语音识别,不再依赖系统的语音识别服务。
+
+## 准备工作
+
+### 1. 下载 Vosk 模型
+
+您需要下载一个 Vosk 语音识别模型。根据您的需求选择:
+
+#### 中文模型(推荐)
+- **小型模型**(约 42 MB):
+ - 下载地址:https://alphacephei.com/vosk/models/vosk-model-small-cn-0.22.zip
+ - 适合资源受限的设备
+
+- **大型模型**(约 1.8 GB):
+ - 下载地址:https://alphacephei.com/vosk/models/vosk-model-cn-0.22.zip
+ - 识别精度更高
+
+#### 英文模型
+- **小型模型**(约 40 MB):
+ - 下载地址:https://alphacephei.com/vosk/models/vosk-model-small-en-us-0.15.zip
+
+- **大型模型**(约 1.8 GB):
+ - 下载地址:https://alphacephei.com/vosk/models/vosk-model-en-us-0.22.zip
+
+### 2. 安装模型到项目
+
+1. 下载并解压您选择的模型文件
+2. 将解压后的模型文件夹重命名为 `model-cn`(如果使用中文)或 `model-en`(如果使用英文)
+3. 将该文件夹复制到项目的 `app/src/main/assets/` 目录下
+
+文件结构应该如下:
+```
+llama.android/
+└── app/
+ └── src/
+ └── main/
+ └── assets/
+ └── model-cn/ # 或 model-en
+ ├── am/
+ ├── conf/
+ ├── graph/
+ └── ivector/
+```
+
+### 3. 修改代码(如果使用英文模型)
+
+如果您使用的是英文模型,需要在 `MainActivity.kt` 中修改模型名称:
+
+找到 `initVoskModel()` 函数中的这一行:
+```kotlin
+val modelPath = StorageService.sync(this, "model-cn")
+```
+
+改为:
+```kotlin
+val modelPath = StorageService.sync(this, "model-en")
+```
+
+## 使用说明
+
+1. 构建并运行应用
+2. 应用启动时会自动加载 Vosk 模型(首次加载可能需要几秒钟)
+3. 查看日志确认模型是否加载成功:"Vosk模型加载成功"
+4. 长按麦克风按钮开始录音,松开按钮结束录音
+5. 录音过程中会实时显示识别的部分结果
+6. 录音结束后显示最终识别结果
+
+## 优势
+
+- ✅ 完全离线工作,不需要网络连接
+- ✅ 不依赖系统语音识别服务
+- ✅ 隐私保护,语音数据不会上传到服务器
+- ✅ 支持多种语言(中文、英文等)
+- ✅ 实时显示部分识别结果
+
+## 故障排除
+
+### 模型加载失败
+- 确认模型文件夹在正确的位置(`app/src/main/assets/model-cn`)
+- 确认模型文件夹内包含必要的子文件夹(am, conf, graph, ivector)
+- 检查应用日志中的错误信息
+
+### 识别效果不佳
+- 尝试使用大型模型以获得更好的识别精度
+- 确保录音环境安静
+- 对着麦克风清晰地说话
+
+### 应用崩溃
+- 检查是否有足够的存储空间
+- 确认已授予麦克风权限
+- 查看崩溃日志获取详细错误信息
+
+## 更多信息
+
+- Vosk 官方网站:https://alphacephei.com/vosk/
+- Vosk Android 文档:https://github.com/alphacep/vosk-android-demo
+- 更多语言模型:https://alphacephei.com/vosk/models
diff --git a/Projects_2025/team7/llama.android/app/.gitignore b/Projects_2025/team7/llama.android/app/.gitignore
new file mode 100644
index 00000000..796b96d1
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/Projects_2025/team7/llama.android/app/build.gradle.kts b/Projects_2025/team7/llama.android/app/build.gradle.kts
new file mode 100644
index 00000000..9403fef6
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/build.gradle.kts
@@ -0,0 +1,70 @@
+plugins {
+ id("com.android.application")
+ id("org.jetbrains.kotlin.android")
+}
+
+android {
+ namespace = "com.example.llama"
+ compileSdk = 34
+
+ defaultConfig {
+ applicationId = "com.example.llama"
+ minSdk = 33
+ targetSdk = 34
+ versionCode = 1
+ versionName = "1.0"
+
+ testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
+ vectorDrawables {
+ useSupportLibrary = true
+ }
+ }
+
+ buildTypes {
+ release {
+ isMinifyEnabled = false
+ proguardFiles(
+ getDefaultProguardFile("proguard-android-optimize.txt"),
+ "proguard-rules.pro"
+ )
+ }
+ }
+ compileOptions {
+ sourceCompatibility = JavaVersion.VERSION_1_8
+ targetCompatibility = JavaVersion.VERSION_1_8
+ }
+ kotlinOptions {
+ jvmTarget = "1.8"
+ }
+ buildFeatures {
+ compose = true
+ }
+ composeOptions {
+ kotlinCompilerExtensionVersion = "1.5.1"
+ }
+}
+
+dependencies {
+
+ implementation("androidx.core:core-ktx:1.12.0")
+ implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.6.2")
+ implementation("androidx.activity:activity-compose:1.8.2")
+ implementation(platform("androidx.compose:compose-bom:2023.08.00"))
+ implementation("androidx.compose.ui:ui")
+ implementation("androidx.compose.ui:ui-graphics")
+ implementation("androidx.compose.ui:ui-tooling-preview")
+ implementation("androidx.compose.material3:material3")
+ implementation("androidx.compose.material:material-icons-extended:1.6.0")
+ implementation(project(":llama"))
+
+ // Vosk 离线语音识别
+ implementation("com.alphacephei:vosk-android:0.3.47")
+
+ testImplementation("junit:junit:4.13.2")
+ androidTestImplementation("androidx.test.ext:junit:1.1.5")
+ androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1")
+ androidTestImplementation(platform("androidx.compose:compose-bom:2023.08.00"))
+ androidTestImplementation("androidx.compose.ui:ui-test-junit4")
+ debugImplementation("androidx.compose.ui:ui-tooling")
+ debugImplementation("androidx.compose.ui:ui-test-manifest")
+}
diff --git a/Projects_2025/team7/llama.android/app/proguard-rules.pro b/Projects_2025/team7/llama.android/app/proguard-rules.pro
new file mode 100644
index 00000000..f1b42451
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/Projects_2025/team7/llama.android/app/src/main/AndroidManifest.xml b/Projects_2025/team7/llama.android/app/src/main/AndroidManifest.xml
new file mode 100644
index 00000000..7324ecf1
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,52 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/README.md b/Projects_2025/team7/llama.android/app/src/main/assets/README.md
new file mode 100644
index 00000000..6c7766a5
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/README.md
@@ -0,0 +1,30 @@
+# Vosk 模型目录
+
+请将下载的 Vosk 模型文件夹放在此目录下。
+
+## 下载模型
+
+### 中文模型(推荐)
+小型模型(42 MB):https://alphacephei.com/vosk/models/vosk-model-small-cn-0.22.zip
+
+### 英文模型
+小型模型(40 MB):https://alphacephei.com/vosk/models/vosk-model-small-en-us-0.15.zip
+
+## 安装步骤
+
+1. 下载上述模型之一
+2. 解压缩下载的文件
+3. 将解压后的文件夹重命名为 `model-cn`(中文)或 `model-en`(英文)
+4. 将该文件夹复制到此目录(assets)下
+
+最终目录结构应该是:
+```
+assets/
+└── model-cn/ # 或 model-en
+ ├── am/
+ ├── conf/
+ ├── graph/
+ └── ivector/
+```
+
+更多详细信息,请参考项目根目录下的 VOSK_SETUP.md 文件。
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/README b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/README
new file mode 100644
index 00000000..a7f79317
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/README
@@ -0,0 +1,9 @@
+US English model for mobile Vosk applications
+
+Copyright 2020 Alpha Cephei Inc
+
+Accuracy: 10.38 (tedlium test) 9.85 (librispeech test-clean)
+Speed: 0.11xRT (desktop)
+Latency: 0.15s (right context)
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/am/final.mdl b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/am/final.mdl
new file mode 100644
index 00000000..5596b31d
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/am/final.mdl differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/mfcc.conf b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/mfcc.conf
new file mode 100644
index 00000000..eaa40c5b
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/mfcc.conf
@@ -0,0 +1,7 @@
+--sample-frequency=16000
+--use-energy=false
+--num-mel-bins=40
+--num-ceps=40
+--low-freq=20
+--high-freq=7600
+--allow-downsample=true
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/model.conf b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/model.conf
new file mode 100644
index 00000000..9d5b0da3
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/conf/model.conf
@@ -0,0 +1,10 @@
+--min-active=200
+--max-active=3000
+--beam=10.0
+--lattice-beam=2.0
+--acoustic-scale=1.0
+--frame-subsampling-factor=3
+--endpoint.silence-phones=1:2:3:4:5:6:7:8:9:10
+--endpoint.rule2.min-trailing-silence=0.5
+--endpoint.rule3.min-trailing-silence=0.75
+--endpoint.rule4.min-trailing-silence=1.0
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/Gr.fst b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/Gr.fst
new file mode 100644
index 00000000..1f292e63
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/Gr.fst differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/HCLr.fst b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/HCLr.fst
new file mode 100644
index 00000000..9797b262
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/HCLr.fst differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/disambig_tid.int b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/disambig_tid.int
new file mode 100644
index 00000000..762fd5f0
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/disambig_tid.int
@@ -0,0 +1,17 @@
+10015
+10016
+10017
+10018
+10019
+10020
+10021
+10022
+10023
+10024
+10025
+10026
+10027
+10028
+10029
+10030
+10031
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/phones/word_boundary.int b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/phones/word_boundary.int
new file mode 100644
index 00000000..df23fd7c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/graph/phones/word_boundary.int
@@ -0,0 +1,166 @@
+1 nonword
+2 begin
+3 end
+4 internal
+5 singleton
+6 nonword
+7 begin
+8 end
+9 internal
+10 singleton
+11 begin
+12 end
+13 internal
+14 singleton
+15 begin
+16 end
+17 internal
+18 singleton
+19 begin
+20 end
+21 internal
+22 singleton
+23 begin
+24 end
+25 internal
+26 singleton
+27 begin
+28 end
+29 internal
+30 singleton
+31 begin
+32 end
+33 internal
+34 singleton
+35 begin
+36 end
+37 internal
+38 singleton
+39 begin
+40 end
+41 internal
+42 singleton
+43 begin
+44 end
+45 internal
+46 singleton
+47 begin
+48 end
+49 internal
+50 singleton
+51 begin
+52 end
+53 internal
+54 singleton
+55 begin
+56 end
+57 internal
+58 singleton
+59 begin
+60 end
+61 internal
+62 singleton
+63 begin
+64 end
+65 internal
+66 singleton
+67 begin
+68 end
+69 internal
+70 singleton
+71 begin
+72 end
+73 internal
+74 singleton
+75 begin
+76 end
+77 internal
+78 singleton
+79 begin
+80 end
+81 internal
+82 singleton
+83 begin
+84 end
+85 internal
+86 singleton
+87 begin
+88 end
+89 internal
+90 singleton
+91 begin
+92 end
+93 internal
+94 singleton
+95 begin
+96 end
+97 internal
+98 singleton
+99 begin
+100 end
+101 internal
+102 singleton
+103 begin
+104 end
+105 internal
+106 singleton
+107 begin
+108 end
+109 internal
+110 singleton
+111 begin
+112 end
+113 internal
+114 singleton
+115 begin
+116 end
+117 internal
+118 singleton
+119 begin
+120 end
+121 internal
+122 singleton
+123 begin
+124 end
+125 internal
+126 singleton
+127 begin
+128 end
+129 internal
+130 singleton
+131 begin
+132 end
+133 internal
+134 singleton
+135 begin
+136 end
+137 internal
+138 singleton
+139 begin
+140 end
+141 internal
+142 singleton
+143 begin
+144 end
+145 internal
+146 singleton
+147 begin
+148 end
+149 internal
+150 singleton
+151 begin
+152 end
+153 internal
+154 singleton
+155 begin
+156 end
+157 internal
+158 singleton
+159 begin
+160 end
+161 internal
+162 singleton
+163 begin
+164 end
+165 internal
+166 singleton
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.dubm b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.dubm
new file mode 100644
index 00000000..db789eb9
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.dubm differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.ie b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.ie
new file mode 100644
index 00000000..93737bf7
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.ie differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.mat b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.mat
new file mode 100644
index 00000000..c3ec635b
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/final.mat differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/global_cmvn.stats b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/global_cmvn.stats
new file mode 100644
index 00000000..b9d92efb
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/global_cmvn.stats
@@ -0,0 +1,3 @@
+ [
+ 1.682383e+11 -1.1595e+10 -1.521733e+10 4.32034e+09 -2.257938e+10 -1.969666e+10 -2.559265e+10 -1.535687e+10 -1.276854e+10 -4.494483e+09 -1.209085e+10 -5.64008e+09 -1.134847e+10 -3.419512e+09 -1.079542e+10 -4.145463e+09 -6.637486e+09 -1.11318e+09 -3.479773e+09 -1.245932e+08 -1.386961e+09 6.560655e+07 -2.436518e+08 -4.032432e+07 4.620046e+08 -7.714964e+07 9.551484e+08 -4.119761e+08 8.208582e+08 -7.117156e+08 7.457703e+08 -4.3106e+08 1.202726e+09 2.904036e+08 1.231931e+09 3.629848e+08 6.366939e+08 -4.586172e+08 -5.267629e+08 -3.507819e+08 1.679838e+09
+ 1.741141e+13 8.92488e+11 8.743834e+11 8.848896e+11 1.190313e+12 1.160279e+12 1.300066e+12 1.005678e+12 9.39335e+11 8.089614e+11 7.927041e+11 6.882427e+11 6.444235e+11 5.151451e+11 4.825723e+11 3.210106e+11 2.720254e+11 1.772539e+11 1.248102e+11 6.691599e+10 3.599804e+10 1.207574e+10 1.679301e+09 4.594778e+08 5.821614e+09 1.451758e+10 2.55803e+10 3.43277e+10 4.245286e+10 4.784859e+10 4.988591e+10 4.925451e+10 5.074584e+10 4.9557e+10 4.407876e+10 3.421443e+10 3.138606e+10 2.539716e+10 1.948134e+10 1.381167e+10 0 ]
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/online_cmvn.conf b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/online_cmvn.conf
new file mode 100644
index 00000000..7748a4a4
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/online_cmvn.conf
@@ -0,0 +1 @@
+# configuration file for apply-cmvn-online, used in the script ../local/run_online_decoding.sh
diff --git a/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/splice.conf b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/splice.conf
new file mode 100644
index 00000000..960cd2e4
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/assets/model-en/ivector/splice.conf
@@ -0,0 +1,2 @@
+--left-context=3
+--right-context=3
diff --git a/Projects_2025/team7/llama.android/app/src/main/ic_launcher2-playstore.png b/Projects_2025/team7/llama.android/app/src/main/ic_launcher2-playstore.png
new file mode 100644
index 00000000..73ddba14
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/ic_launcher2-playstore.png differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/Downloadable.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/Downloadable.kt
new file mode 100644
index 00000000..c271c8d5
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/Downloadable.kt
@@ -0,0 +1,123 @@
+package com.example.llama
+
+import android.app.DownloadManager
+import android.net.Uri
+import android.util.Log
+import androidx.compose.foundation.layout.fillMaxWidth
+import androidx.compose.material3.Button
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableDoubleStateOf
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.rememberCoroutineScope
+import androidx.compose.runtime.setValue
+import androidx.core.database.getLongOrNull
+import androidx.core.net.toUri
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.launch
+import androidx.compose.ui.Modifier
+import java.io.File
+
+data class Downloadable(val name: String, val source: Uri, val destination: File) {
+ companion object {
+ @JvmStatic
+ private val tag: String? = this::class.qualifiedName
+
+ sealed interface State
+ data object Ready: State
+ data class Downloading(val id: Long): State
+ data class Downloaded(val downloadable: Downloadable): State
+ data class Error(val message: String): State
+
+ @JvmStatic
+ @Composable
+ fun Button(viewModel: MainViewModel, dm: DownloadManager, item: Downloadable) {
+ var status: State by remember {
+ mutableStateOf(
+ if (item.destination.exists()) Downloaded(item)
+ else Ready
+ )
+ }
+ var progress by remember { mutableDoubleStateOf(0.0) }
+
+ val coroutineScope = rememberCoroutineScope()
+
+ suspend fun waitForDownload(result: Downloading, item: Downloadable): State {
+ while (true) {
+ val cursor = dm.query(DownloadManager.Query().setFilterById(result.id))
+
+ if (cursor == null) {
+ Log.e(tag, "dm.query() returned null")
+ return Error("dm.query() returned null")
+ }
+
+ if (!cursor.moveToFirst() || cursor.count < 1) {
+ cursor.close()
+ Log.i(tag, "cursor.moveToFirst() returned false or cursor.count < 1, download canceled?")
+ return Ready
+ }
+
+ val pix = cursor.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR)
+ val tix = cursor.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES)
+ val sofar = cursor.getLongOrNull(pix) ?: 0
+ val total = cursor.getLongOrNull(tix) ?: 1
+ cursor.close()
+
+ if (sofar == total) {
+ return Downloaded(item)
+ }
+
+ progress = (sofar * 1.0) / total
+
+ delay(1000L)
+ }
+ }
+
+ fun onClick() {
+ when (val s = status) {
+ is Downloaded -> {
+ viewModel.load(item.destination.path)
+ }
+
+ is Downloading -> {
+ coroutineScope.launch {
+ status = waitForDownload(s, item)
+ }
+ }
+
+ else -> {
+ item.destination.delete()
+
+ val request = DownloadManager.Request(item.source).apply {
+ setTitle("Downloading model")
+ setDescription("Downloading model: ${item.name}")
+ setAllowedNetworkTypes(DownloadManager.Request.NETWORK_WIFI)
+ setDestinationUri(item.destination.toUri())
+ }
+
+ viewModel.log("正在保存 ${item.name} to ${item.destination.path}")
+ Log.i(tag, "Saving ${item.name} to ${item.destination.path}")
+
+ val id = dm.enqueue(request)
+ status = Downloading(id)
+ onClick()
+ }
+ }
+ }
+
+ Button(
+ onClick = { onClick() },
+ modifier = Modifier.fillMaxWidth(0.9f),
+ enabled = status !is Downloading) {
+ when (status) {
+ is Downloading -> Text(text = "正在下载 ${(progress * 100).toInt()}%")
+ is Downloaded -> Text("加载 ${item.name}")
+ is Ready -> Text("点击下载 ${item.name}")
+ is Error -> Text("点击下载 ${item.name}")
+ }
+ }
+ }
+ }
+}
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainActivity.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainActivity.kt
new file mode 100644
index 00000000..fd65d248
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainActivity.kt
@@ -0,0 +1,515 @@
+package com.example.llama
+
+import android.Manifest
+import android.app.ActivityManager
+import android.app.DownloadManager
+import android.content.ClipData
+import android.content.ClipboardManager
+import android.content.Intent
+import android.content.pm.PackageManager
+import android.net.Uri
+import android.os.Bundle
+import android.os.StrictMode
+import android.os.StrictMode.VmPolicy
+import android.text.format.Formatter
+import android.media.AudioRecord
+import android.media.AudioFormat
+import android.media.MediaRecorder
+import org.vosk.Model
+import org.vosk.Recognizer
+import org.vosk.android.RecognitionListener as VoskRecognitionListener
+import org.vosk.android.SpeechService
+import org.vosk.android.StorageService
+import org.json.JSONObject
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import androidx.activity.ComponentActivity
+import androidx.activity.compose.setContent
+import androidx.activity.result.contract.ActivityResultContracts
+import androidx.activity.viewModels
+import androidx.compose.foundation.background
+import androidx.compose.foundation.gestures.detectTapGestures
+import androidx.compose.foundation.layout.Arrangement
+import androidx.compose.foundation.layout.Box
+import androidx.compose.foundation.layout.Column
+import androidx.compose.foundation.layout.Row
+import androidx.compose.foundation.layout.Spacer
+import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.foundation.layout.fillMaxWidth
+import androidx.compose.foundation.layout.padding
+import androidx.compose.foundation.layout.width
+import androidx.compose.foundation.lazy.LazyColumn
+import androidx.compose.foundation.lazy.items
+import androidx.compose.foundation.lazy.rememberLazyListState
+import androidx.compose.foundation.shape.RoundedCornerShape
+import androidx.compose.material.icons.Icons
+import androidx.compose.material.icons.filled.ArrowUpward
+import androidx.compose.material.icons.filled.Mic
+import androidx.compose.material3.Button
+import androidx.compose.material3.Icon
+import androidx.compose.material3.IconButton
+import androidx.compose.material3.LocalContentColor
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.OutlinedTextField
+import androidx.compose.material3.Surface
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.DisposableEffect
+import androidx.compose.runtime.LaunchedEffect
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.setValue
+import androidx.compose.runtime.snapshotFlow
+import androidx.compose.ui.Alignment
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.input.pointer.pointerInput
+import androidx.compose.ui.platform.LocalContext
+import androidx.compose.ui.unit.dp
+import androidx.core.content.ContextCompat
+import androidx.core.content.getSystemService
+import com.example.llama.ui.theme.LlamaAndroidTheme
+import java.io.File
+import androidx.compose.foundation.layout.size
+import androidx.compose.ui.graphics.Color
+import androidx.compose.foundation.layout.statusBarsPadding
+import androidx.compose.material3.OutlinedTextFieldDefaults
+
+class MainActivity(
+ activityManager: ActivityManager? = null,
+ downloadManager: DownloadManager? = null,
+ clipboardManager: ClipboardManager? = null,
+): ComponentActivity() {
+ private val tag: String? = this::class.simpleName
+
+ private val activityManager by lazy { activityManager ?: getSystemService()!! }
+ private val downloadManager by lazy { downloadManager ?: getSystemService()!! }
+ private val clipboardManager by lazy { clipboardManager ?: getSystemService()!! }
+
+ private val viewModel: MainViewModel by viewModels()
+
+ // 麦克风权限
+ // requestPermissionLauncher是一个工具变量,是registerForActivityResult类的实例化
+ // 自带.launch()方法,这个方法接受一个参数,这个参数必须是ActivityResultContracts.RequestPermission()类型,比如Manifest.permission.RECORD_AUDIO就是
+ // 然后requestPermissionLauncher的.launch()方法根据传入的权限类型去弹出弹窗,根据用户是否同意授予权限,去执行回调函数
+ // 这个回调函数的形式是 参数 -> {执行代码},参数其实就是用户是否同意授予权限的一个Boolean值
+ private val requestPermissionLauncher = registerForActivityResult(
+ ActivityResultContracts.RequestPermission()
+ ) { isGranted: Boolean ->
+ if (isGranted) {
+ viewModel.log("麦克风权限已授予")
+ } else {
+ viewModel.log("麦克风权限被拒绝")
+ }
+ }
+
+ // Vosk 语音识别相关
+ private var model: Model? = null
+ private var speechService: SpeechService? = null
+
+ fun checkAndRequestPermission() {
+ when {
+ ContextCompat.checkSelfPermission(
+ this,
+ Manifest.permission.RECORD_AUDIO
+ ) == PackageManager.PERMISSION_GRANTED -> { }
+ else -> {
+ requestPermissionLauncher.launch(Manifest.permission.RECORD_AUDIO)
+ }
+ }
+ }
+
+ // Get a MemoryInfo object for the device's current memory status.
+ private fun availableMemory(): ActivityManager.MemoryInfo {
+ return ActivityManager.MemoryInfo().also { memoryInfo ->
+ activityManager.getMemoryInfo(memoryInfo)
+ }
+ }
+
+ // 1. MainActivity类是程序打开之后运行的第一个类
+ // 1. 并且打开之后对MainActivity实例化之后,执行OnCreate()方法
+ override fun onCreate(savedInstanceState: Bundle?) {
+ // 1.1 不需要关心
+ super.onCreate(savedInstanceState)
+
+ // 1.2 获取麦克风权限
+ checkAndRequestPermission()
+
+ // 1.2 不需要关心
+ StrictMode.setVmPolicy(
+ VmPolicy.Builder(StrictMode.getVmPolicy())
+ .detectLeakedClosableObjects()
+ .build()
+ )
+
+ // 1.3 获取内存情况,并追加在viewModel的messages中
+ val free = Formatter.formatFileSize(this, availableMemory().availMem)
+ val total = Formatter.formatFileSize(this, availableMemory().totalMem)
+ viewModel.log("空闲内存/总内存: $free / $total")
+ viewModel.log("模型下载目录: ${getExternalFilesDir(null)}")
+
+ // 1.3.1 初始化Vosk模型
+ initVoskModel()
+
+ // 1.4 需要下载的模型列表
+ val extFilesDir = getExternalFilesDir(null)
+ val models = listOf(
+ Downloadable(
+ "TinyLlama 1.1B (int8, 1.17 GB)",
+ Uri.parse("https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf?download=true"), // 本地路径也可以写成 file://
+ File(extFilesDir, "tinyllama-1.1b-chat-v1.0.Q8_0.gguf") // 保存/使用路径
+ ),
+ Downloadable(
+ "Qwen3 4B (int4, 2.38 GB)",
+ Uri.parse("https://huggingface.co/unsloth/Qwen3-4B-GGUF/resolve/main/Qwen3-4B-Q4_0.gguf?download=true"), // 本地路径也可以写成 file://
+ File(extFilesDir, "Qwen3-4B-Q4_0.gguf") // 保存/使用路径
+ ),
+ // Downloadable(
+ // "Qwen2.5 0.5B (fp16, 1.27 GB)",
+ // Uri.parse("https://huggingface.co/Qwen/Qwen2.5-0.5B-Instruct-GGUF/resolve/main/qwen2.5-0.5b-instruct-fp16.gguf?download=true"), // 本地路径也可以写成 file://
+ // File(extFilesDir, "qwen2.5-0.5b-instruct-fp16.gguf") // 保存/使用路径
+ // )
+ )
+
+ // 1.5 SetContent是一个函数
+ // 1.5.1 程序入口是MainActivity,并且一进来实例化之后会马上执行OnCreate()
+ // 1.5.1 一般MainActivity的OnCreate中都会有调用SetContent,因为SetContent之前的代码都是在创建或者声明变量,然后获取一些值,只有SetContent是真正在手机屏幕上展示内容
+ // 1.5.2 func在声明的时候要求传入3个参数func(arg1, arg2, arg3){},arg3是一个函数,然后在Kotlin的语法糖中,可以写成func(arg1, arg2){arg3},原因是arg3的代码可能很长,这样会更好看
+ // 1.5.2 如果func在声明的时候只要求传入1个参数,且这个参数是函数,就可以直接写成func{arg1}
+ // 1.5.2 如果以上情况中所有参数都不是函数,只是一个变量(比如int,double或者某个类的实例),就不可以这样
+ // 1.5.2 SetContent(arg1){}中,arg1就是一个函数,所以可以写成SetContent{arg1}
+
+ // 1.5.3 SetContent(arg1:lambda){},所以写成SetContent{arg1}
+ // 1.5.3 LlamaAndroidTheme(arg1:lambda){},所以写成LlamaAndroidTheme{arg1}
+ // 1.5.3 Surface(arg1, arg2, arg3:lambda){},所以写成Surface(arg1, arg2){arg3}
+ setContent {
+ LlamaAndroidTheme {
+ // A surface container using the 'background' color from the theme
+ // 这个Surface的color是对话框背景的color,所以把colorScheme.background设置成白色
+ Surface(
+ modifier = Modifier.fillMaxSize(),
+ color = MaterialTheme.colorScheme.background
+ ) {
+ // 1.6 arg3:lambda
+ // 1.6.1 需要传入viewModel,剪切板,下载器,模型列表
+ // 1.6.2 arg3的具体声明代码在本类之后
+ MainCompose(
+ viewModel,
+ clipboardManager,
+ downloadManager,
+ models,
+ ::startVoskRecognition,
+ ::stopVoskRecognition
+ )
+ }
+ }
+ }
+ }
+
+ // 初始化Vosk模型
+ private fun initVoskModel() {
+ Thread {
+ try {
+ // viewModel.log("开始加载Vosk模型...")
+ // 将assets中的模型解压到应用的缓存目录
+ val modelDir = File(cacheDir, "model-en")
+
+ // 如果缓存目录中没有模型,则从assets复制
+ if (!modelDir.exists()) {
+ // viewModel.log("首次加载,正在复制模型文件...")
+ modelDir.mkdirs()
+
+ // 复制assets中的所有文件到缓存目录
+ copyAssetFolder("model-en", modelDir.absolutePath)
+ }
+
+ // 使用缓存目录中的模型创建Model对象
+ model = Model(modelDir.absolutePath)
+ // viewModel.log("Vosk模型加载成功")
+ } catch (e: Exception) {
+ viewModel.log("Vosk模型加载失败: ${e.message}")
+ e.printStackTrace()
+ }
+ }.start()
+ }
+
+ // 递归复制assets文件夹到目标路径
+ private fun copyAssetFolder(assetPath: String, targetPath: String) {
+ try {
+ val assetList = assets.list(assetPath) ?: return
+
+ if (assetList.isEmpty()) {
+ // 这是一个文件,复制它
+ assets.open(assetPath).use { input ->
+ File(targetPath).outputStream().use { output ->
+ input.copyTo(output)
+ }
+ }
+ } else {
+ // 这是一个目录,递归复制
+ val targetDir = File(targetPath)
+ targetDir.mkdirs()
+
+ for (asset in assetList) {
+ val assetFilePath = if (assetPath.isEmpty()) asset else "$assetPath/$asset"
+ val targetFilePath = "$targetPath/$asset"
+ copyAssetFolder(assetFilePath, targetFilePath)
+ }
+ }
+ } catch (e: Exception) {
+ viewModel.log("复制资源文件失败: ${e.message}")
+ e.printStackTrace()
+ }
+ }
+
+    // Begin streaming offline speech recognition with Vosk. Partial hypotheses
+    // are delivered through [onPartialResult]; finalized utterance text through
+    // [onResult]. Requires a loaded model; logs and returns otherwise.
+    private fun startVoskRecognition(onResult: (String) -> Unit, onPartialResult: (String) -> Unit) {
+        if (model == null) {
+            viewModel.log("模型未加载,无法开始识别")
+            return
+        }
+
+        try {
+            // 16 kHz sample rate for both the recognizer and the mic service.
+            speechService = SpeechService(Recognizer(model, 16000.0f), 16000.0f)
+
+            speechService?.startListening(object : VoskRecognitionListener {
+                // Vosk hands back JSON; pull one field out and forward it to the
+                // callback when non-empty. Shared by all three result callbacks.
+                private fun deliver(hypothesis: String?, key: String, errorLabel: String, callback: (String) -> Unit) {
+                    if (hypothesis == null) return
+                    try {
+                        val value = JSONObject(hypothesis).optString(key, "")
+                        if (value.isNotEmpty()) {
+                            callback(value)
+                        }
+                    } catch (e: Exception) {
+                        viewModel.log("$errorLabel: ${e.message}")
+                    }
+                }
+
+                // Live in-progress hypothesis while the user is still speaking.
+                override fun onPartialResult(hypothesis: String?) =
+                    deliver(hypothesis, "partial", "解析部分结果失败", onPartialResult)
+
+                // Finalized text for a completed utterance segment.
+                override fun onResult(hypothesis: String?) =
+                    deliver(hypothesis, "text", "解析最终结果失败", onResult)
+
+                // Final text emitted when the recognizer is stopped.
+                override fun onFinalResult(hypothesis: String?) =
+                    deliver(hypothesis, "text", "解析最终结果失败", onResult)
+
+                override fun onError(exception: Exception?) {
+                    viewModel.log("识别错误: ${exception?.message}")
+                }
+
+                override fun onTimeout() {
+                    viewModel.log("识别超时")
+                }
+            })
+        } catch (e: Exception) {
+            viewModel.log("启动识别失败: ${e.message}")
+        }
+    }
+
+    // Stop listening, release the Vosk speech service, and clear the reference.
+    private fun stopVoskRecognition() {
+        speechService?.apply {
+            stop()
+            shutdown()
+        }
+        speechService = null
+    }
+
+    // Release recognition resources when the Activity is destroyed:
+    // stop the microphone service, then free the loaded Vosk model.
+    override fun onDestroy() {
+        super.onDestroy()
+        stopVoskRecognition()
+        model?.close()
+    }
+}
+
+@Composable
+fun MainCompose(
+    viewModel: MainViewModel,
+    clipboard: ClipboardManager,
+    dm: DownloadManager,
+    models: List<Downloadable>,
+    startRecognition: ((String) -> Unit, (String) -> Unit) -> Unit,
+    stopRecognition: () -> Unit
+) {
+    // Root layout: app bar on top, chat history filling the remaining space,
+    // then the input row and the model-download buttons.
+    Column(
+        horizontalAlignment = Alignment.CenterHorizontally, // center children horizontally
+        modifier = Modifier.fillMaxSize() // occupy the whole screen
+    ) {
+        // 1. Top app bar
+        Row(
+            verticalAlignment = Alignment.CenterVertically,
+            modifier = Modifier
+                .fillMaxWidth()
+                .background(MaterialTheme.colorScheme.background)
+                .statusBarsPadding()
+                .padding(vertical = 16.dp, horizontal = 16.dp)
+        ) {
+            Box(
+                modifier = Modifier
+                    .size(13.dp)
+                    .background(MaterialTheme.colorScheme.primary, shape = RoundedCornerShape(6.5.dp))
+            )
+            Spacer(modifier = Modifier.width(8.dp))
+            Text(
+                text = "LlamaBot",
+                color = MaterialTheme.colorScheme.primary,
+                style = MaterialTheme.typography.titleLarge
+            )
+        }
+
+        // 2. Chat history
+        val scrollState = rememberLazyListState()
+        Box(modifier = Modifier.weight(1f).fillMaxWidth()) {
+            // 2.1 LazyColumn lazily renders the message transcript.
+            LazyColumn(
+                state = scrollState,
+                modifier = Modifier.fillMaxSize()
+            ) {
+                items(viewModel.messages.size) { index ->
+                    val message = viewModel.messages[index]
+                    Row(
+                        modifier = Modifier
+                            .fillMaxWidth()
+                            .padding(horizontal = 16.dp, vertical = 4.dp),
+                        // Even indices are right-aligned, odd indices left-aligned.
+                        horizontalArrangement = if (index % 2 == 0) Arrangement.End else Arrangement.Start
+                    ) {
+                        // Message text uses LocalContentColor; the bubble background
+                        // is onTertiary at 10% alpha so it reads in both themes.
+                        Text(
+                            text = message,
+                            style = MaterialTheme.typography.bodyLarge.copy(color = LocalContentColor.current),
+                            modifier = Modifier
+                                .background(MaterialTheme.colorScheme.onTertiary.copy(alpha = 0.1f), shape = RoundedCornerShape(8.dp))
+                                .padding(8.dp)
+                        )
+                    }
+                }
+            }
+
+            // 2.2 Auto-scroll to the newest message whenever the last entry
+            // changes (covers both new messages and streamed token updates).
+            LaunchedEffect(viewModel.messages) {
+                snapshotFlow { viewModel.messages.lastOrNull() }
+                    .collect { _ ->
+                        if (viewModel.messages.isNotEmpty()) {
+                            scrollState.animateScrollToItem(viewModel.messages.size - 1)
+                        }
+                    }
+            }
+        }
+
+        // 3. User input
+        // 3.1 Voice-input state - offline recognition via Vosk
+        var isRecording by remember { mutableStateOf(false) }
+
+        Row(
+            verticalAlignment = Alignment.CenterVertically,
+            modifier = Modifier.fillMaxWidth(0.9f)
+        ){
+            // 3.2 Text field bound to viewModel.message
+            OutlinedTextField(
+                value = viewModel.message,
+                onValueChange = { viewModel.updateMessage(it) },
+                label = { Text("Message") },
+
+                modifier = Modifier.fillMaxWidth(0.65f), // limit width
+                shape = RoundedCornerShape(24.dp) // rounded corners
+            )
+            Spacer(modifier = Modifier.width(8.dp))
+
+            // 3.3 Push-to-talk microphone button: hold to record, release to stop.
+            Box(
+                modifier = Modifier
+                    .size(48.dp)
+                    .background(
+                        color = if (isRecording) MaterialTheme.colorScheme.onSurfaceVariant else MaterialTheme.colorScheme.secondary,
+                        shape = RoundedCornerShape(50)
+                    )
+                    .pointerInput(Unit) {
+                        detectTapGestures(
+                            onPress = {
+                                // Start Vosk recognition while the button is pressed.
+                                isRecording = true
+
+                                startRecognition(
+                                    // onResult - finalized utterance text
+                                    { text ->
+                                        viewModel.updateMessage(text)
+                                    },
+                                    // onPartialResult - live partial hypothesis
+                                    { partial ->
+                                        viewModel.updateMessage(partial)
+                                    }
+                                )
+
+                                tryAwaitRelease()
+                                // Stop recording on release.
+                                isRecording = false
+                                stopRecognition()
+                            }
+                        )
+                    },
+                contentAlignment = Alignment.Center
+            ) {
+                Icon(
+                    imageVector = Icons.Default.Mic,
+                    contentDescription = "Voice Input",
+                    tint = MaterialTheme.colorScheme.onSecondary
+                )
+            }
+            Spacer(modifier = Modifier.width(8.dp))
+
+            // 3.4 Send button
+            IconButton(
+                onClick = { viewModel.send() },
+                modifier = Modifier
+                    .background(
+                        color = MaterialTheme.colorScheme.primary,
+                        shape = RoundedCornerShape(50)
+                    )
+                    .size(48.dp)
+            ) {
+                Icon(
+                    imageVector = Icons.Default.ArrowUpward,
+                    contentDescription = "Send",
+                    tint = MaterialTheme.colorScheme.onPrimary
+                )
+            }
+        }
+
+        // 4. Model download buttons
+        Column(
+            horizontalAlignment = Alignment.CenterHorizontally
+        ) {
+            for (model in models) {
+                Downloadable.Button(viewModel, dm, model)
+            }
+        }
+    }
+}
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt
new file mode 100644
index 00000000..363d1d9a
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt
@@ -0,0 +1,85 @@
+package com.example.llama
+
+import android.llama.cpp.LLamaAndroid
+import android.util.Log
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.setValue
+import androidx.lifecycle.ViewModel
+import androidx.lifecycle.viewModelScope
+import kotlinx.coroutines.flow.catch
+import kotlinx.coroutines.launch
+
+class MainViewModel(private val llamaAndroid: LLamaAndroid = LLamaAndroid.instance()): ViewModel() {
+    private val tag: String? = this::class.simpleName
+
+    // Chat transcript shown in the UI. `by mutableStateOf` makes reads
+    // observable, so composables displaying it recompose on every update;
+    // `private set` keeps mutation inside the view model.
+    var messages by mutableStateOf(listOf("初始化..."))
+        private set
+
+    // Current contents of the input field.
+    var message by mutableStateOf("")
+        private set
+
+    override fun onCleared() {
+        super.onCleared()
+
+        viewModelScope.launch {
+            try {
+                llamaAndroid.unload()
+            } catch (exc: IllegalStateException) {
+                // Exception messages are nullable — avoid the `!!` NPE.
+                messages += exc.message ?: "unload() failed"
+            }
+        }
+    }
+
+    // Send the current input to the model and stream the reply into the last
+    // transcript entry.
+    fun send() {
+        val text = message
+        message = ""
+
+        // Echo the user's message, then add an empty entry that streamed
+        // tokens are appended to below.
+        messages += text
+        messages += ""
+
+        viewModelScope.launch {
+            llamaAndroid.send(text)
+                .catch {
+                    Log.e(tag, "send() failed", it)
+                    messages += it.message ?: "send() failed"
+                }
+                .collect { messages = messages.dropLast(1) + (messages.last() + it) }
+        }
+    }
+
+    // Load the model at the given path and report the outcome in the transcript.
+    fun load(pathToModel: String) {
+        viewModelScope.launch {
+            try {
+                llamaAndroid.load(pathToModel)
+                messages += "已加载 $pathToModel"
+            } catch (exc: IllegalStateException) {
+                Log.e(tag, "load() failed", exc)
+                messages += exc.message ?: "load() failed"
+            }
+        }
+    }
+
+    // Replace the input-field contents (bound to the text field / voice input).
+    fun updateMessage(newMessage: String) {
+        message = newMessage
+    }
+
+    // Append a line to the transcript (status / diagnostic output).
+    fun log(message: String) {
+        messages += message
+    }
+}
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt
new file mode 100644
index 00000000..4de35d55
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt
@@ -0,0 +1,7 @@
+package com.example.llama.ui.theme
+
+import androidx.compose.ui.graphics.Color
+
+// Monochrome palette shared by the light and dark color schemes (Theme.kt).
+val Black = Color(0xFF000000)
+val Gray = Color(0xFF808080)
+val White = Color(0xFFFFFFFF)
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt
new file mode 100644
index 00000000..38adda5c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt
@@ -0,0 +1,74 @@
+package com.example.llama.ui.theme
+
+import android.app.Activity
+import android.os.Build
+import androidx.compose.foundation.isSystemInDarkTheme
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.darkColorScheme
+import androidx.compose.material3.dynamicDarkColorScheme
+import androidx.compose.material3.dynamicLightColorScheme
+import androidx.compose.material3.lightColorScheme
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.SideEffect
+import androidx.compose.ui.graphics.toArgb
+import androidx.compose.ui.platform.LocalContext
+import androidx.compose.ui.platform.LocalView
+import androidx.core.view.WindowCompat
+
+// Dark theme: white foregrounds on a black background.
+private val DarkColorScheme = darkColorScheme(
+    // 1. Status bar + chat screen background
+    background = Black,
+
+    // 2. Chat text color, message text color, message bubble color
+    onTertiary = White,
+    onSurfaceVariant = White.copy(alpha = 0.72f),
+    outline = White.copy(alpha = 0.72f),
+
+    // 3. Button color + button label color
+    primary = White,
+    onPrimary = Black,
+
+    // 4. Other colors
+    secondary = Gray,
+    tertiary = Black
+)
+
+// Light theme: exact inverse of DarkColorScheme (black on white).
+private val LightColorScheme = lightColorScheme(
+    // 1. Status bar + chat screen background
+    background = White,
+
+    // 2. Chat text color, message text color, message bubble color
+    onTertiary = Black,
+    onSurfaceVariant = Black.copy(alpha = 0.72f),
+    outline = Black.copy(alpha = 0.72f),
+
+    // 3. Button color + button label color
+    primary = Black,
+    onPrimary = White,
+
+    // 4. Other colors
+    secondary = Gray,
+    tertiary = White
+)
+
+// App-wide Material 3 theme. Picks the dark or light palette and keeps the
+// system status bar in sync with the chosen background color.
+@Composable
+fun LlamaAndroidTheme(
+    darkTheme: Boolean = isSystemInDarkTheme(),
+    content: @Composable () -> Unit
+) {
+    val colorScheme = if (darkTheme) DarkColorScheme else LightColorScheme
+    val view = LocalView.current
+    // Skip window tweaks in the IDE preview, where no Activity window exists.
+    if (!view.isInEditMode) {
+        SideEffect {
+            val window = (view.context as Activity).window
+            window.statusBarColor = colorScheme.background.toArgb()
+            // Dark status-bar icons on light backgrounds and vice versa.
+            WindowCompat.getInsetsController(window, view).isAppearanceLightStatusBars = !darkTheme
+        }
+    }
+
+    MaterialTheme(
+        colorScheme = colorScheme,
+        typography = Typography,
+        content = content
+    )
+}
diff --git a/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
new file mode 100644
index 00000000..0b87946c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
@@ -0,0 +1,34 @@
+package com.example.llama.ui.theme
+
+import androidx.compose.material3.Typography
+import androidx.compose.ui.text.TextStyle
+import androidx.compose.ui.text.font.FontFamily
+import androidx.compose.ui.text.font.FontWeight
+import androidx.compose.ui.unit.sp
+
+// Set of Material typography styles to start with
+// Set of Material typography styles to start with.
+// Only bodyLarge is customized; other styles use the Material 3 defaults.
+val Typography = Typography(
+    bodyLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 16.sp,
+        lineHeight = 24.sp,
+        letterSpacing = 0.5.sp
+    )
+    /* Other default text styles to override
+    titleLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 22.sp,
+        lineHeight = 28.sp,
+        letterSpacing = 0.sp
+    ),
+    labelSmall = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Medium,
+        fontSize = 11.sp,
+        lineHeight = 16.sp,
+        letterSpacing = 0.5.sp
+    )
+    */
+)
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher2_background.xml b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher2_background.xml
new file mode 100644
index 00000000..ca3826a4
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher2_background.xml
@@ -0,0 +1,74 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_background.xml b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 00000000..07d5da9c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
new file mode 100644
index 00000000..7706ab9e
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2.xml b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2.xml
new file mode 100644
index 00000000..a1809a08
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2_round.xml b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2_round.xml
new file mode 100644
index 00000000..a1809a08
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi-v26/ic_launcher2_round.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
new file mode 100644
index 00000000..b3e26b4c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml
new file mode 100644
index 00000000..b3e26b4c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp
new file mode 100644
index 00000000..c209e78e
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2.webp
new file mode 100644
index 00000000..d2f6fa5e
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_foreground.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_foreground.webp
new file mode 100644
index 00000000..adebdadf
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_foreground.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_round.webp
new file mode 100644
index 00000000..9a444fbc
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher2_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp
new file mode 100644
index 00000000..b2dfe3d1
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp
new file mode 100644
index 00000000..4f0f1d64
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2.webp
new file mode 100644
index 00000000..2bf007a9
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_foreground.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_foreground.webp
new file mode 100644
index 00000000..04dc8519
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_foreground.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_round.webp
new file mode 100644
index 00000000..b11066c8
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher2_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp
new file mode 100644
index 00000000..62b611da
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp
new file mode 100644
index 00000000..948a3070
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2.webp
new file mode 100644
index 00000000..6fca9161
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_foreground.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_foreground.webp
new file mode 100644
index 00000000..38252cee
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_foreground.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_round.webp
new file mode 100644
index 00000000..33443a37
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher2_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp
new file mode 100644
index 00000000..1b9a6956
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp
new file mode 100644
index 00000000..28d4b77f
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2.webp
new file mode 100644
index 00000000..ec1f9ffa
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_foreground.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_foreground.webp
new file mode 100644
index 00000000..9fa9479f
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_foreground.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_round.webp
new file mode 100644
index 00000000..caba4e7f
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher2_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp
new file mode 100644
index 00000000..9287f508
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp
new file mode 100644
index 00000000..aa7d6427
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2.webp
new file mode 100644
index 00000000..ca3f0216
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_foreground.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_foreground.webp
new file mode 100644
index 00000000..2c9e8c74
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_foreground.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_round.webp
new file mode 100644
index 00000000..61f1e955
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher2_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp
new file mode 100644
index 00000000..9126ae37
Binary files /dev/null and b/Projects_2025/team7/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp differ
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/values/colors.xml b/Projects_2025/team7/llama.android/app/src/main/res/values/colors.xml
new file mode 100644
index 00000000..ca1931bc
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/values/colors.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="purple_200">#FFBB86FC</color>
+    <color name="purple_500">#FF6200EE</color>
+    <color name="purple_700">#FF3700B3</color>
+    <color name="teal_200">#FF03DAC5</color>
+    <color name="teal_700">#FF018786</color>
+    <color name="black">#FF000000</color>
+    <color name="white">#FFFFFFFF</color>
+</resources>
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/values/strings.xml b/Projects_2025/team7/llama.android/app/src/main/res/values/strings.xml
new file mode 100644
index 00000000..36ffe4b6
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">LlamaBot</string>
+</resources>
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/values/themes.xml b/Projects_2025/team7/llama.android/app/src/main/res/values/themes.xml
new file mode 100644
index 00000000..8a24fda5
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/values/themes.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/xml/backup_rules.xml b/Projects_2025/team7/llama.android/app/src/main/res/xml/backup_rules.xml
new file mode 100644
index 00000000..148c18b6
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/xml/backup_rules.xml
@@ -0,0 +1,13 @@
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/app/src/main/res/xml/data_extraction_rules.xml b/Projects_2025/team7/llama.android/app/src/main/res/xml/data_extraction_rules.xml
new file mode 100644
index 00000000..0c4f95ca
--- /dev/null
+++ b/Projects_2025/team7/llama.android/app/src/main/res/xml/data_extraction_rules.xml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
diff --git a/Projects_2025/team7/llama.android/build.gradle.kts b/Projects_2025/team7/llama.android/build.gradle.kts
new file mode 100644
index 00000000..acd1ada7
--- /dev/null
+++ b/Projects_2025/team7/llama.android/build.gradle.kts
@@ -0,0 +1,6 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+// AGP 8.2.0 / Kotlin 1.9.0; `apply false` declares versions here while each
+// module applies the plugin it needs.
+plugins {
+    id("com.android.application") version "8.2.0" apply false
+    id("org.jetbrains.kotlin.android") version "1.9.0" apply false
+    id("com.android.library") version "8.2.0" apply false
+}
diff --git a/Projects_2025/team7/llama.android/gradle.properties b/Projects_2025/team7/llama.android/gradle.properties
new file mode 100644
index 00000000..2cbd6d19
--- /dev/null
+++ b/Projects_2025/team7/llama.android/gradle.properties
@@ -0,0 +1,23 @@
+# Project-wide Gradle settings.
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+# AndroidX package structure to make it clearer which packages are bundled with the
+# Android operating system, and which are packaged with your app's APK
+# https://developer.android.com/topic/libraries/support-library/androidx-rn
+android.useAndroidX=true
+# Kotlin code style for this project: "official" or "obsolete":
+kotlin.code.style=official
+# Enables namespacing of each library's R class so that its R class includes only the
+# resources declared in the library itself and none from the library's dependencies,
+# thereby reducing the size of the R class for that library
+android.nonTransitiveRClass=true
diff --git a/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.jar b/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 00000000..e708b1c0
Binary files /dev/null and b/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.properties b/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 00000000..a3958c14
--- /dev/null
+++ b/Projects_2025/team7/llama.android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Thu Dec 21 14:31:09 AEDT 2023
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/Projects_2025/team7/llama.android/gradlew b/Projects_2025/team7/llama.android/gradlew
new file mode 100755
index 00000000..4f906e0c
--- /dev/null
+++ b/Projects_2025/team7/llama.android/gradlew
@@ -0,0 +1,185 @@
+#!/usr/bin/env sh
+
+#
+# Copyright 2015 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+# NOTE(review): vendored Gradle wrapper script (Gradle 8.2 distribution, see
+# gradle/wrapper/gradle-wrapper.properties). Keep byte-identical to upstream;
+# regenerate with `gradle wrapper` rather than hand-editing.
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=`expr $i + 1`
+    done
+    case $i in
+        0) set -- ;;
+        1) set -- "$args0" ;;
+        2) set -- "$args0" "$args1" ;;
+        3) set -- "$args0" "$args1" "$args2" ;;
+        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=`save "$@"`
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+exec "$JAVACMD" "$@"
diff --git a/Projects_2025/team7/llama.android/llama/.DS_Store b/Projects_2025/team7/llama.android/llama/.DS_Store
new file mode 100644
index 00000000..74e6c6d5
Binary files /dev/null and b/Projects_2025/team7/llama.android/llama/.DS_Store differ
diff --git a/Projects_2025/team7/llama.android/llama/.gitignore b/Projects_2025/team7/llama.android/llama/.gitignore
new file mode 100644
index 00000000..796b96d1
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/Projects_2025/team7/llama.android/llama/build.gradle.kts b/Projects_2025/team7/llama.android/llama/build.gradle.kts
new file mode 100644
index 00000000..5bb64780
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/build.gradle.kts
@@ -0,0 +1,71 @@
+plugins {
+ id("com.android.library")
+ id("org.jetbrains.kotlin.android")
+}
+
+android {
+ namespace = "android.llama.cpp"
+ compileSdk = 34
+
+ defaultConfig {
+ minSdk = 33
+
+ testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
+ consumerProguardFiles("consumer-rules.pro")
+ ndk {
+ // Add NDK properties if wanted, e.g.
+ // abiFilters += listOf("arm64-v8a")
+ }
+ externalNativeBuild {
+ cmake {
+ arguments += "-DLLAMA_CURL=OFF"
+ arguments += "-DLLAMA_BUILD_COMMON=ON"
+ arguments += "-DGGML_LLAMAFILE=OFF"
+ arguments += "-DCMAKE_BUILD_TYPE=Release"
+ cppFlags += listOf()
+ arguments += listOf()
+
+ cppFlags("")
+ }
+ }
+ }
+
+ buildTypes {
+ release {
+ isMinifyEnabled = false
+ proguardFiles(
+ getDefaultProguardFile("proguard-android-optimize.txt"),
+ "proguard-rules.pro"
+ )
+ }
+ }
+ externalNativeBuild {
+ cmake {
+ path("src/main/cpp/CMakeLists.txt")
+ version = "3.22.1"
+ }
+ }
+ compileOptions {
+ sourceCompatibility = JavaVersion.VERSION_1_8
+ targetCompatibility = JavaVersion.VERSION_1_8
+ }
+ kotlinOptions {
+ jvmTarget = "1.8"
+ }
+
+ packaging {
+ resources {
+ excludes += "/META-INF/{AL2.0,LGPL2.1}"
+ }
+ }
+}
+
+dependencies {
+
+ implementation("androidx.core:core-ktx:1.12.0")
+ implementation("androidx.appcompat:appcompat:1.6.1")
+ implementation("com.google.android.material:material:1.11.0")
+ testImplementation("junit:junit:4.13.2")
+ androidTestImplementation("androidx.test.ext:junit:1.1.5")
+ androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1")
+}
diff --git a/Projects_2025/team7/llama.android/llama/consumer-rules.pro b/Projects_2025/team7/llama.android/llama/consumer-rules.pro
new file mode 100644
index 00000000..e69de29b
diff --git a/Projects_2025/team7/llama.android/llama/proguard-rules.pro b/Projects_2025/team7/llama.android/llama/proguard-rules.pro
new file mode 100644
index 00000000..f1b42451
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/Projects_2025/team7/llama.android/llama/src/androidTest/java/android/llama/cpp/ExampleInstrumentedTest.kt b/Projects_2025/team7/llama.android/llama/src/androidTest/java/android/llama/cpp/ExampleInstrumentedTest.kt
new file mode 100644
index 00000000..05d6ab5d
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/androidTest/java/android/llama/cpp/ExampleInstrumentedTest.kt
@@ -0,0 +1,24 @@
+package android.llama.cpp
+
+import androidx.test.platform.app.InstrumentationRegistry
+import androidx.test.ext.junit.runners.AndroidJUnit4
+
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.Assert.*
+
+/**
+ * Instrumented test, which will execute on an Android device.
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+@RunWith(AndroidJUnit4::class)
+class ExampleInstrumentedTest {
+ @Test
+ fun useAppContext() {
+ // Context of the app under test.
+ val appContext = InstrumentationRegistry.getInstrumentation().targetContext
+ assertEquals("android.llama.cpp.test", appContext.packageName)
+ }
+}
diff --git a/Projects_2025/team7/llama.android/llama/src/main/AndroidManifest.xml b/Projects_2025/team7/llama.android/llama/src/main/AndroidManifest.xml
new file mode 100644
index 00000000..8bdb7e14
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/main/AndroidManifest.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+
+</manifest>
diff --git a/Projects_2025/team7/llama.android/llama/src/main/cpp/CMakeLists.txt b/Projects_2025/team7/llama.android/llama/src/main/cpp/CMakeLists.txt
new file mode 100644
index 00000000..6119fe09
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/main/cpp/CMakeLists.txt
@@ -0,0 +1,53 @@
+# For more information about using CMake with Android Studio, read the
+# documentation: https://d.android.com/studio/projects/add-native-code.html.
+# For more examples on how to use CMake, see https://github.com/android/ndk-samples.
+
+# Sets the minimum CMake version required for this project.
+cmake_minimum_required(VERSION 3.22.1)
+
+# Declares the project name. The project name can be accessed via ${ PROJECT_NAME},
+# Since this is the top level CMakeLists.txt, the project name is also accessible
+# with ${CMAKE_PROJECT_NAME} (both CMake variables are in-sync within the top level
+# build script scope).
+project("llama-android")
+
+#include(FetchContent)
+#FetchContent_Declare(
+# llama
+# GIT_REPOSITORY https://github.com/ggml-org/llama.cpp
+# GIT_TAG master
+#)
+
+# Also provides "common"
+#FetchContent_MakeAvailable(llama)
+
+# Creates and names a library, sets it as either STATIC
+# or SHARED, and provides the relative paths to its source code.
+# You can define multiple libraries, and CMake builds them for you.
+# Gradle automatically packages shared libraries with your APK.
+#
+# In this top level CMakeLists.txt, ${CMAKE_PROJECT_NAME} is used to define
+# the target library name; in the sub-module's CMakeLists.txt, ${PROJECT_NAME}
+# is preferred for the same purpose.
+#
+
+#load local llama.cpp
+add_subdirectory(../../../../../../ build-llama)
+
+# In order to load a library into your app from Java/Kotlin, you must call
+# System.loadLibrary() and pass the name of the library defined here;
+# for GameActivity/NativeActivity derived applications, the same library name must be
+# used in the AndroidManifest.xml file.
+add_library(${CMAKE_PROJECT_NAME} SHARED
+ # List C/C++ source files with relative paths to this CMakeLists.txt.
+ llama-android.cpp)
+
+# Specifies libraries CMake should link to your target library. You
+# can link libraries from various origins, such as libraries defined in this
+# build script, prebuilt third-party libraries, or Android system libraries.
+target_link_libraries(${CMAKE_PROJECT_NAME}
+ # List libraries link to the target library
+ llama
+ common
+ android
+ log)
diff --git a/Projects_2025/team7/llama.android/llama/src/main/cpp/llama-android.cpp b/Projects_2025/team7/llama.android/llama/src/main/cpp/llama-android.cpp
new file mode 100644
index 00000000..711ddc5d
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/main/cpp/llama-android.cpp
@@ -0,0 +1,452 @@
+#include <android/log.h>
+#include <jni.h>
+#include <iomanip>
+#include <math.h>
+#include <string>
+#include <unistd.h>
+#include "llama.h"
+#include "common.h"
+
+// Write C++ code here.
+//
+// Do not forget to dynamically load the C++ library into your application.
+//
+// For instance,
+//
+// In MainActivity.java:
+// static {
+// System.loadLibrary("llama-android");
+// }
+//
+// Or, in MainActivity.kt:
+// companion object {
+// init {
+// System.loadLibrary("llama-android")
+// }
+// }
+
+#define TAG "llama-android.cpp"
+#define LOGi(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+#define LOGe(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+
+jclass la_int_var;
+jmethodID la_int_var_value;
+jmethodID la_int_var_inc;
+
+std::string cached_token_chars;
+
+bool is_valid_utf8(const char * string) {
+ if (!string) {
+ return true;
+ }
+
+ const unsigned char * bytes = (const unsigned char *)string;
+ int num;
+
+ while (*bytes != 0x00) {
+ if ((*bytes & 0x80) == 0x00) {
+ // U+0000 to U+007F
+ num = 1;
+ } else if ((*bytes & 0xE0) == 0xC0) {
+ // U+0080 to U+07FF
+ num = 2;
+ } else if ((*bytes & 0xF0) == 0xE0) {
+ // U+0800 to U+FFFF
+ num = 3;
+ } else if ((*bytes & 0xF8) == 0xF0) {
+ // U+10000 to U+10FFFF
+ num = 4;
+ } else {
+ return false;
+ }
+
+ bytes += 1;
+ for (int i = 1; i < num; ++i) {
+ if ((*bytes & 0xC0) != 0x80) {
+ return false;
+ }
+ bytes += 1;
+ }
+ }
+
+ return true;
+}
+
+static void log_callback(ggml_log_level level, const char * fmt, void * data) {
+ if (level == GGML_LOG_LEVEL_ERROR) __android_log_print(ANDROID_LOG_ERROR, TAG, fmt, data);
+ else if (level == GGML_LOG_LEVEL_INFO) __android_log_print(ANDROID_LOG_INFO, TAG, fmt, data);
+ else if (level == GGML_LOG_LEVEL_WARN) __android_log_print(ANDROID_LOG_WARN, TAG, fmt, data);
+ else __android_log_print(ANDROID_LOG_DEFAULT, TAG, fmt, data);
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_android_llama_cpp_LLamaAndroid_load_1model(JNIEnv *env, jobject, jstring filename) {
+ llama_model_params model_params = llama_model_default_params();
+
+ auto path_to_model = env->GetStringUTFChars(filename, 0);
+ LOGi("Loading model from %s", path_to_model);
+
+ auto model = llama_model_load_from_file(path_to_model, model_params);
+ env->ReleaseStringUTFChars(filename, path_to_model);
+
+ if (!model) {
+ LOGe("load_model() failed");
+ env->ThrowNew(env->FindClass("java/lang/IllegalStateException"), "load_model() failed");
+ return 0;
+ }
+
+    return reinterpret_cast<jlong>(model);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_free_1model(JNIEnv *, jobject, jlong model) {
+    llama_model_free(reinterpret_cast<llama_model *>(model));
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_android_llama_cpp_LLamaAndroid_new_1context(JNIEnv *env, jobject, jlong jmodel) {
+    auto model = reinterpret_cast<llama_model *>(jmodel);
+
+ if (!model) {
+ LOGe("new_context(): model cannot be null");
+ env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"), "Model cannot be null");
+ return 0;
+ }
+
+ int n_threads = std::max(1, std::min(8, (int) sysconf(_SC_NPROCESSORS_ONLN) - 2));
+ LOGi("Using %d threads", n_threads);
+
+ llama_context_params ctx_params = llama_context_default_params();
+
+ ctx_params.n_ctx = 2048;
+ ctx_params.n_threads = n_threads;
+ ctx_params.n_threads_batch = n_threads;
+
+ llama_context * context = llama_new_context_with_model(model, ctx_params);
+
+ if (!context) {
+ LOGe("llama_new_context_with_model() returned null)");
+ env->ThrowNew(env->FindClass("java/lang/IllegalStateException"),
+ "llama_new_context_with_model() returned null)");
+ return 0;
+ }
+
+    return reinterpret_cast<jlong>(context);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_free_1context(JNIEnv *, jobject, jlong context) {
+    llama_free(reinterpret_cast<llama_context *>(context));
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_backend_1free(JNIEnv *, jobject) {
+ llama_backend_free();
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_log_1to_1android(JNIEnv *, jobject) {
+ llama_log_set(log_callback, NULL);
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_android_llama_cpp_LLamaAndroid_bench_1model(
+ JNIEnv *env,
+ jobject,
+ jlong context_pointer,
+ jlong model_pointer,
+ jlong batch_pointer,
+ jint pp,
+ jint tg,
+ jint pl,
+ jint nr
+ ) {
+ auto pp_avg = 0.0;
+ auto tg_avg = 0.0;
+ auto pp_std = 0.0;
+ auto tg_std = 0.0;
+
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto model = reinterpret_cast<llama_model *>(model_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+
+ const int n_ctx = llama_n_ctx(context);
+
+ LOGi("n_ctx = %d", n_ctx);
+
+ int i, j;
+ int nri;
+ for (nri = 0; nri < nr; nri++) {
+ LOGi("Benchmark prompt processing (pp)");
+
+ common_batch_clear(*batch);
+
+ const int n_tokens = pp;
+ for (i = 0; i < n_tokens; i++) {
+ common_batch_add(*batch, 0, i, { 0 }, false);
+ }
+
+ batch->logits[batch->n_tokens - 1] = true;
+ llama_memory_clear(llama_get_memory(context), false);
+
+ const auto t_pp_start = ggml_time_us();
+ if (llama_decode(context, *batch) != 0) {
+ LOGi("llama_decode() failed during prompt processing");
+ }
+ const auto t_pp_end = ggml_time_us();
+
+ // bench text generation
+
+ LOGi("Benchmark text generation (tg)");
+
+ llama_memory_clear(llama_get_memory(context), false);
+ const auto t_tg_start = ggml_time_us();
+ for (i = 0; i < tg; i++) {
+
+ common_batch_clear(*batch);
+ for (j = 0; j < pl; j++) {
+ common_batch_add(*batch, 0, i, { j }, true);
+ }
+
+ LOGi("llama_decode() text generation: %d", i);
+ if (llama_decode(context, *batch) != 0) {
+ LOGi("llama_decode() failed during text generation");
+ }
+ }
+
+ const auto t_tg_end = ggml_time_us();
+
+ llama_memory_clear(llama_get_memory(context), false);
+
+ const auto t_pp = double(t_pp_end - t_pp_start) / 1000000.0;
+ const auto t_tg = double(t_tg_end - t_tg_start) / 1000000.0;
+
+ const auto speed_pp = double(pp) / t_pp;
+ const auto speed_tg = double(pl * tg) / t_tg;
+
+ pp_avg += speed_pp;
+ tg_avg += speed_tg;
+
+ pp_std += speed_pp * speed_pp;
+ tg_std += speed_tg * speed_tg;
+
+ LOGi("pp %f t/s, tg %f t/s", speed_pp, speed_tg);
+ }
+
+ pp_avg /= double(nr);
+ tg_avg /= double(nr);
+
+ if (nr > 1) {
+ pp_std = sqrt(pp_std / double(nr - 1) - pp_avg * pp_avg * double(nr) / double(nr - 1));
+ tg_std = sqrt(tg_std / double(nr - 1) - tg_avg * tg_avg * double(nr) / double(nr - 1));
+ } else {
+ pp_std = 0;
+ tg_std = 0;
+ }
+
+ char model_desc[128];
+ llama_model_desc(model, model_desc, sizeof(model_desc));
+
+ const auto model_size = double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0;
+ const auto model_n_params = double(llama_model_n_params(model)) / 1e9;
+
+ const auto backend = "(Android)"; // TODO: What should this be?
+
+ std::stringstream result;
+ result << std::setprecision(2);
+ result << "| model | size | params | backend | test | t/s |\n";
+ result << "| --- | --- | --- | --- | --- | --- |\n";
+ result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | pp " << pp << " | " << pp_avg << " ± " << pp_std << " |\n";
+ result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | tg " << tg << " | " << tg_avg << " ± " << tg_std << " |\n";
+
+ return env->NewStringUTF(result.str().c_str());
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_android_llama_cpp_LLamaAndroid_new_1batch(JNIEnv *, jobject, jint n_tokens, jint embd, jint n_seq_max) {
+
+ // Source: Copy of llama.cpp:llama_batch_init but heap-allocated.
+
+ llama_batch *batch = new llama_batch {
+ 0,
+ nullptr,
+ nullptr,
+ nullptr,
+ nullptr,
+ nullptr,
+ nullptr,
+ };
+
+ if (embd) {
+ batch->embd = (float *) malloc(sizeof(float) * n_tokens * embd);
+ } else {
+ batch->token = (llama_token *) malloc(sizeof(llama_token) * n_tokens);
+ }
+
+ batch->pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens);
+ batch->n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens);
+ batch->seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * n_tokens);
+ for (int i = 0; i < n_tokens; ++i) {
+ batch->seq_id[i] = (llama_seq_id *) malloc(sizeof(llama_seq_id) * n_seq_max);
+ }
+ batch->logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens);
+
+    return reinterpret_cast<jlong>(batch);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_free_1batch(JNIEnv *, jobject, jlong batch_pointer) {
+    //llama_batch_free(*reinterpret_cast<llama_batch *>(batch_pointer));
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+ delete batch;
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_android_llama_cpp_LLamaAndroid_new_1sampler(JNIEnv *, jobject) {
+ auto sparams = llama_sampler_chain_default_params();
+ sparams.no_perf = true;
+ llama_sampler * smpl = llama_sampler_chain_init(sparams);
+ llama_sampler_chain_add(smpl, llama_sampler_init_greedy());
+
+    return reinterpret_cast<jlong>(smpl);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_free_1sampler(JNIEnv *, jobject, jlong sampler_pointer) {
+    llama_sampler_free(reinterpret_cast<llama_sampler *>(sampler_pointer));
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_backend_1init(JNIEnv *, jobject) {
+ llama_backend_init();
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_android_llama_cpp_LLamaAndroid_system_1info(JNIEnv *env, jobject) {
+ return env->NewStringUTF(llama_print_system_info());
+}
+
+extern "C"
+JNIEXPORT jint JNICALL
+Java_android_llama_cpp_LLamaAndroid_completion_1init(
+ JNIEnv *env,
+ jobject,
+ jlong context_pointer,
+ jlong batch_pointer,
+ jstring jtext,
+ jboolean format_chat,
+ jint n_len
+ ) {
+
+ cached_token_chars.clear();
+
+ const auto text = env->GetStringUTFChars(jtext, 0);
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+
+ bool parse_special = (format_chat == JNI_TRUE);
+ const auto tokens_list = common_tokenize(context, text, true, parse_special);
+
+ auto n_ctx = llama_n_ctx(context);
+ auto n_kv_req = tokens_list.size() + n_len;
+
+ LOGi("n_len = %d, n_ctx = %d, n_kv_req = %d", n_len, n_ctx, n_kv_req);
+
+ if (n_kv_req > n_ctx) {
+ LOGe("error: n_kv_req > n_ctx, the required KV cache size is not big enough");
+ }
+
+ for (auto id : tokens_list) {
+ LOGi("token: `%s`-> %d ", common_token_to_piece(context, id).c_str(), id);
+ }
+
+ common_batch_clear(*batch);
+
+ // evaluate the initial prompt
+ for (auto i = 0; i < tokens_list.size(); i++) {
+ common_batch_add(*batch, tokens_list[i], i, { 0 }, false);
+ }
+
+ // llama_decode will output logits only for the last token of the prompt
+ batch->logits[batch->n_tokens - 1] = true;
+
+ if (llama_decode(context, *batch) != 0) {
+ LOGe("llama_decode() failed");
+ }
+
+ env->ReleaseStringUTFChars(jtext, text);
+
+ return batch->n_tokens;
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_android_llama_cpp_LLamaAndroid_completion_1loop(
+ JNIEnv * env,
+ jobject,
+ jlong context_pointer,
+ jlong batch_pointer,
+ jlong sampler_pointer,
+ jint n_len,
+ jobject intvar_ncur
+) {
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+    const auto sampler = reinterpret_cast<llama_sampler *>(sampler_pointer);
+ const auto model = llama_get_model(context);
+ const auto vocab = llama_model_get_vocab(model);
+
+ if (!la_int_var) la_int_var = env->GetObjectClass(intvar_ncur);
+ if (!la_int_var_value) la_int_var_value = env->GetMethodID(la_int_var, "getValue", "()I");
+ if (!la_int_var_inc) la_int_var_inc = env->GetMethodID(la_int_var, "inc", "()V");
+
+ // sample the most likely token
+ const auto new_token_id = llama_sampler_sample(sampler, context, -1);
+
+ const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value);
+ if (llama_vocab_is_eog(vocab, new_token_id) || n_cur == n_len) {
+ return nullptr;
+ }
+
+ auto new_token_chars = common_token_to_piece(context, new_token_id);
+ cached_token_chars += new_token_chars;
+
+ jstring new_token = nullptr;
+ if (is_valid_utf8(cached_token_chars.c_str())) {
+ new_token = env->NewStringUTF(cached_token_chars.c_str());
+ LOGi("cached: %s, new_token_chars: `%s`, id: %d", cached_token_chars.c_str(), new_token_chars.c_str(), new_token_id);
+ cached_token_chars.clear();
+ } else {
+ new_token = env->NewStringUTF("");
+ }
+
+ common_batch_clear(*batch);
+ common_batch_add(*batch, new_token_id, n_cur, { 0 }, true);
+
+ env->CallVoidMethod(intvar_ncur, la_int_var_inc);
+
+ if (llama_decode(context, *batch) != 0) {
+ LOGe("llama_decode() returned null");
+ }
+
+ return new_token;
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_android_llama_cpp_LLamaAndroid_kv_1cache_1clear(JNIEnv *, jobject, jlong context) {
+    llama_memory_clear(llama_get_memory(reinterpret_cast<llama_context *>(context)), true);
+}
diff --git a/Projects_2025/team7/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt b/Projects_2025/team7/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt
new file mode 100644
index 00000000..b964d93e
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt
@@ -0,0 +1,180 @@
+package android.llama.cpp
+
+import android.util.Log
+import kotlinx.coroutines.CoroutineDispatcher
+import kotlinx.coroutines.asCoroutineDispatcher
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.flow
+import kotlinx.coroutines.flow.flowOn
+import kotlinx.coroutines.withContext
+import java.util.concurrent.Executors
+import kotlin.concurrent.thread
+
+class LLamaAndroid {
+ private val tag: String? = this::class.simpleName
+
+    private val threadLocalState: ThreadLocal<State> = ThreadLocal.withInitial { State.Idle }
+
+ private val runLoop: CoroutineDispatcher = Executors.newSingleThreadExecutor {
+ thread(start = false, name = "Llm-RunLoop") {
+ Log.d(tag, "Dedicated thread for native code: ${Thread.currentThread().name}")
+
+ // No-op if called more than once.
+ System.loadLibrary("llama-android")
+
+ // Set llama log handler to Android
+ log_to_android()
+ backend_init(false)
+
+ Log.d(tag, system_info())
+
+ it.run()
+ }.apply {
+ uncaughtExceptionHandler = Thread.UncaughtExceptionHandler { _, exception: Throwable ->
+ Log.e(tag, "Unhandled exception", exception)
+ }
+ }
+ }.asCoroutineDispatcher()
+
+ private val nlen: Int = 64
+
+ private external fun log_to_android()
+ private external fun load_model(filename: String): Long
+ private external fun free_model(model: Long)
+ private external fun new_context(model: Long): Long
+ private external fun free_context(context: Long)
+ private external fun backend_init(numa: Boolean)
+ private external fun backend_free()
+ private external fun new_batch(nTokens: Int, embd: Int, nSeqMax: Int): Long
+ private external fun free_batch(batch: Long)
+ private external fun new_sampler(): Long
+ private external fun free_sampler(sampler: Long)
+ private external fun bench_model(
+ context: Long,
+ model: Long,
+ batch: Long,
+ pp: Int,
+ tg: Int,
+ pl: Int,
+ nr: Int
+ ): String
+
+ private external fun system_info(): String
+
+ private external fun completion_init(
+ context: Long,
+ batch: Long,
+ text: String,
+ formatChat: Boolean,
+ nLen: Int
+ ): Int
+
+ private external fun completion_loop(
+ context: Long,
+ batch: Long,
+ sampler: Long,
+ nLen: Int,
+ ncur: IntVar
+ ): String?
+
+ private external fun kv_cache_clear(context: Long)
+
+ suspend fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1): String {
+ return withContext(runLoop) {
+ when (val state = threadLocalState.get()) {
+ is State.Loaded -> {
+ Log.d(tag, "bench(): $state")
+ bench_model(state.context, state.model, state.batch, pp, tg, pl, nr)
+ }
+
+ else -> throw IllegalStateException("No model loaded")
+ }
+ }
+ }
+
+ suspend fun load(pathToModel: String) {
+ withContext(runLoop) {
+ when (threadLocalState.get()) {
+ is State.Idle -> {
+ val model = load_model(pathToModel)
+ if (model == 0L) throw IllegalStateException("load_model() failed")
+
+ val context = new_context(model)
+ if (context == 0L) throw IllegalStateException("new_context() failed")
+
+ val batch = new_batch(512, 0, 1)
+ if (batch == 0L) throw IllegalStateException("new_batch() failed")
+
+ val sampler = new_sampler()
+ if (sampler == 0L) throw IllegalStateException("new_sampler() failed")
+
+ Log.i(tag, "Loaded model $pathToModel")
+ threadLocalState.set(State.Loaded(model, context, batch, sampler))
+ }
+ else -> throw IllegalStateException("Model already loaded")
+ }
+ }
+ }
+
+    fun send(message: String, formatChat: Boolean = false): Flow<String> = flow {
+ when (val state = threadLocalState.get()) {
+ is State.Loaded -> {
+ val ncur = IntVar(completion_init(state.context, state.batch, message, formatChat, nlen))
+ while (ncur.value <= nlen) {
+ val str = completion_loop(state.context, state.batch, state.sampler, nlen, ncur)
+ if (str == null) {
+ break
+ }
+ emit(str)
+ }
+ kv_cache_clear(state.context)
+ }
+ else -> {}
+ }
+ }.flowOn(runLoop)
+
+ /**
+ * Unloads the model and frees resources.
+ *
+ * This is a no-op if there's no model loaded.
+ */
+ suspend fun unload() {
+ withContext(runLoop) {
+ when (val state = threadLocalState.get()) {
+ is State.Loaded -> {
+ free_context(state.context)
+ free_model(state.model)
+ free_batch(state.batch)
+ free_sampler(state.sampler);
+
+ threadLocalState.set(State.Idle)
+ }
+ else -> {}
+ }
+ }
+ }
+
+ companion object {
+ private class IntVar(value: Int) {
+ @Volatile
+ var value: Int = value
+ private set
+
+ fun inc() {
+ synchronized(this) {
+ value += 1
+ }
+ }
+ }
+
+ private sealed interface State {
+ data object Idle: State
+ data class Loaded(val model: Long, val context: Long, val batch: Long, val sampler: Long): State
+ }
+
+ // Enforce only one instance of Llm.
+ private val _instance: LLamaAndroid = LLamaAndroid()
+
+ fun instance(): LLamaAndroid = _instance
+ }
+}
diff --git a/Projects_2025/team7/llama.android/llama/src/test/java/android/llama/cpp/ExampleUnitTest.kt b/Projects_2025/team7/llama.android/llama/src/test/java/android/llama/cpp/ExampleUnitTest.kt
new file mode 100644
index 00000000..cbbb974d
--- /dev/null
+++ b/Projects_2025/team7/llama.android/llama/src/test/java/android/llama/cpp/ExampleUnitTest.kt
@@ -0,0 +1,17 @@
+package android.llama.cpp
+
+import org.junit.Test
+
+import org.junit.Assert.*
+
+/**
+ * Example local unit test, which will execute on the development machine (host).
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+class ExampleUnitTest {
+ @Test
+ fun addition_isCorrect() {
+ assertEquals(4, 2 + 2)
+ }
+}
diff --git a/Projects_2025/team7/llama.android/settings.gradle.kts b/Projects_2025/team7/llama.android/settings.gradle.kts
new file mode 100644
index 00000000..c7c1a034
--- /dev/null
+++ b/Projects_2025/team7/llama.android/settings.gradle.kts
@@ -0,0 +1,18 @@
+pluginManagement {
+ repositories {
+ google()
+ mavenCentral()
+ gradlePluginPortal()
+ }
+}
+dependencyResolutionManagement {
+ repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
+ repositories {
+ google()
+ mavenCentral()
+ }
+}
+
+rootProject.name = "LlamaAndroid"
+include(":app")
+include(":llama")