diff --git a/README.md b/README.md
index b965c6f1..0309b7d2 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-
+

# Java Bindings for [llama.cpp](https://github.com/ggerganov/llama.cpp)
diff --git a/pom.xml b/pom.xml
index 5b00bb42..224bfe4b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
     <groupId>de.kherud</groupId>
     <artifactId>llama</artifactId>
-    <version>3.3.0</version>
+    <version>3.3.1</version>
     <packaging>jar</packaging>
 
     <name>${project.groupId}:${project.artifactId}</name>
@@ -56,12 +56,6 @@
             <version>${junit.version}</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.jetbrains</groupId>
-            <artifactId>annotations</artifactId>
-            <version>24.1.0</version>
-            <scope>compile</scope>
-        </dependency>
diff --git a/src/main/java/de/kherud/llama/LlamaIterable.java b/src/main/java/de/kherud/llama/LlamaIterable.java
index 7e6dff89..cf228e0c 100644
--- a/src/main/java/de/kherud/llama/LlamaIterable.java
+++ b/src/main/java/de/kherud/llama/LlamaIterable.java
@@ -1,14 +1,11 @@
package de.kherud.llama;
-import org.jetbrains.annotations.NotNull;
-
/**
* An iterable used by {@link LlamaModel#generate(InferenceParameters)} that specifically returns a {@link LlamaIterator}.
*/
@FunctionalInterface
public interface LlamaIterable extends Iterable<LlamaOutput> {
- @NotNull
@Override
LlamaIterator iterator();
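
Since `LlamaIterable` extends `Iterable<LlamaOutput>`, dropping the `@NotNull` annotation does not change how streaming generation is consumed: `LlamaModel#generate(InferenceParameters)` can still be used directly in a for-each loop. A minimal sketch follows; the prompt, the `nPredict` value, the prompt-taking `InferenceParameters` constructor, and the pre-built `model` instance are assumptions for illustration, not part of this diff.

```java
import de.kherud.llama.InferenceParameters;
import de.kherud.llama.LlamaModel;
import de.kherud.llama.LlamaOutput;

public class GenerateExample {

    // `model` is assumed to be an already-loaded LlamaModel; its construction is out of scope here.
    static String generateAll(LlamaModel model) {
        InferenceParameters params = new InferenceParameters("Tell me a joke.")
                .setNPredict(32);
        StringBuilder sb = new StringBuilder();
        // generate() returns a LlamaIterable, i.e. an Iterable<LlamaOutput>, so a for-each loop works.
        for (LlamaOutput output : model.generate(params)) {
            sb.append(output.text);
        }
        return sb.toString();
    }
}
```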
diff --git a/src/main/java/de/kherud/llama/LlamaLoader.java b/src/main/java/de/kherud/llama/LlamaLoader.java
index a0239d20..614a33d1 100644
--- a/src/main/java/de/kherud/llama/LlamaLoader.java
+++ b/src/main/java/de/kherud/llama/LlamaLoader.java
@@ -28,8 +28,6 @@
import java.util.List;
import java.util.stream.Stream;
-import org.jetbrains.annotations.Nullable;
-
/**
* Set the system properties, de.kherud.llama.lib.path, de.kherud.llama.lib.name, appropriately so that the
* library can find *.dll, *.dylib and *.so files, according to the current OS (win, linux, mac).
@@ -182,7 +180,6 @@ private static boolean loadNativeLibrary(Path path) {
}
}
- @Nullable
private static Path extractFile(String sourceDirectory, String fileName, String targetDirectory, boolean addUuid) {
String nativeLibraryFilePath = sourceDirectory + "/" + fileName;
diff --git a/src/main/java/de/kherud/llama/LlamaModel.java b/src/main/java/de/kherud/llama/LlamaModel.java
index b78e056e..239ee862 100644
--- a/src/main/java/de/kherud/llama/LlamaModel.java
+++ b/src/main/java/de/kherud/llama/LlamaModel.java
@@ -1,7 +1,6 @@
package de.kherud.llama;
import de.kherud.llama.args.LogFormat;
-import org.jetbrains.annotations.Nullable;
import java.lang.annotation.Native;
import java.nio.charset.StandardCharsets;
@@ -106,9 +105,9 @@ public String decode(int[] tokens) {
 * To disable logging, pass an empty callback, i.e., (level, msg) -> {}.
 *
* @param format the log format to use
- * @param callback a method to call for log messages
+ * @param callback a method to call for log messages (can be set to null)
*/
- public static native void setLogger(LogFormat format, @Nullable BiConsumer<LogLevel, String> callback);
+ public static native void setLogger(LogFormat format, BiConsumer<LogLevel, String> callback);
@Override
public void close() {
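
With `@Nullable` removed, the nullability information for `setLogger` now lives in the javadoc: the callback may be `null`, and logging can also be silenced by passing an empty callback. A hedged sketch follows; `LogFormat.TEXT` and the `LogLevel` parameter type of the callback are assumptions based on the imports and signature touched above.

```java
import java.util.function.BiConsumer;

import de.kherud.llama.LlamaModel;
import de.kherud.llama.LogLevel;
import de.kherud.llama.args.LogFormat;

public class LoggingExample {

    public static void main(String[] args) {
        // Forward native log messages to stderr.
        BiConsumer<LogLevel, String> logger = (level, msg) -> System.err.println(level + ": " + msg);
        LlamaModel.setLogger(LogFormat.TEXT, logger);

        // To disable logging, pass an empty callback (or, per the updated javadoc, null).
        LlamaModel.setLogger(LogFormat.TEXT, (level, msg) -> {});
    }
}
```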
diff --git a/src/main/java/de/kherud/llama/LlamaOutput.java b/src/main/java/de/kherud/llama/LlamaOutput.java
index 365b335e..fd2c20f1 100644
--- a/src/main/java/de/kherud/llama/LlamaOutput.java
+++ b/src/main/java/de/kherud/llama/LlamaOutput.java
@@ -1,7 +1,5 @@
package de.kherud.llama;
-import org.jetbrains.annotations.NotNull;
-
import java.nio.charset.StandardCharsets;
import java.util.Map;
@@ -13,20 +11,19 @@ public final class LlamaOutput {
/**
* The last bit of generated text that is representable as text (i.e., cannot be individual utf-8 multibyte code
- * points).
+ * points). Not null.
*/
- @NotNull
public final String text;
/**
- * Note, that you have to configure {@link InferenceParameters#setNProbs(int)} in order for probabilities to be returned.
+ * Note that you have to configure {@link InferenceParameters#setNProbs(int)} in order for probabilities to be
+ * returned. Not null.
*/
- @NotNull
public final Map<String, Float> probabilities;
final boolean stop;
- LlamaOutput(byte[] generated, @NotNull Map<String, Float> probabilities, boolean stop) {
+ LlamaOutput(byte[] generated, Map<String, Float> probabilities, boolean stop) {
this.text = new String(generated, StandardCharsets.UTF_8);
this.probabilities = probabilities;
this.stop = stop;
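
Both fields of `LlamaOutput` are now documented as non-null in prose instead of via annotations: `text` holds the latest decodable chunk, and `probabilities` is only populated when `InferenceParameters#setNProbs(int)` is configured. A minimal sketch under those assumptions (the prompt, parameter values, and the pre-built `model` instance are placeholders, not part of this diff):

```java
import java.util.Map;

import de.kherud.llama.InferenceParameters;
import de.kherud.llama.LlamaModel;
import de.kherud.llama.LlamaOutput;

public class ProbabilitiesExample {

    // `model` is assumed to be an already-loaded LlamaModel.
    static void printTokenProbabilities(LlamaModel model) {
        InferenceParameters params = new InferenceParameters("The capital of France is")
                .setNPredict(8)
                .setNProbs(5); // without this, `probabilities` stays empty
        for (LlamaOutput output : model.generate(params)) {
            System.out.print(output.text); // never null, per the updated javadoc
            for (Map.Entry<String, Float> entry : output.probabilities.entrySet()) {
                System.out.println("  " + entry.getKey() + " -> " + entry.getValue());
            }
        }
    }
}
```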
diff --git a/src/test/java/de/kherud/llama/LlamaModelTest.java b/src/test/java/de/kherud/llama/LlamaModelTest.java
index b5481cef..c0404bdd 100644
--- a/src/test/java/de/kherud/llama/LlamaModelTest.java
+++ b/src/test/java/de/kherud/llama/LlamaModelTest.java
@@ -88,7 +88,7 @@ public void testGenerateGrammar() {
}
String output = sb.toString();
- Assert.assertTrue(output.matches("[ab]+"));
+ Assert.assertTrue("'" + output + "' doesn't match [ab]+", output.matches("[ab]+"));
int generated = model.encode(output).length;
Assert.assertTrue(generated > 0 && generated <= nPredict + 1);
}
@@ -131,7 +131,7 @@ public void testCompleteGrammar() {
.setGrammar("root ::= (\"a\" | \"b\")+")
.setNPredict(nPredict);
String output = model.complete(params);
- Assert.assertTrue(output + " doesn't match [ab]+", output.matches("[ab]+"));
+ Assert.assertTrue("'" + output + "' doesn't match [ab]+", output.matches("[ab]+"));
int generated = model.encode(output).length;
Assert.assertTrue(generated > 0 && generated <= nPredict + 1);
}