Signed-off-by: Drew DeVault <sir@cmpwn.com>
---
bytes/tokenize.ha | 8 ++++----
strings/tokenize.ha | 8 ++++----
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/bytes/tokenize.ha b/bytes/tokenize.ha
index 703e9ef8..c569a760 100644
--- a/bytes/tokenize.ha
+++ b/bytes/tokenize.ha
@@ -164,16 +164,16 @@ fn tokenize_test(
const p = peek_token(&tok) as []u8;
const n = next_token(&tok) as []u8;
- assert(equal(p, n), testcase);
- assert(equal(n, want), testcase);
+ assert(equal(p, n));
+ assert(equal(n, want));
};
if (n >= iters) {
return tok;
};
- assert(peek_token(&tok) is done, testcase);
- assert(next_token(&tok) is done, testcase);
+ assert(peek_token(&tok) is done);
+ assert(next_token(&tok) is done);
return tok;
};
diff --git a/strings/tokenize.ha b/strings/tokenize.ha
index 03b76f77..3e4a4eac 100644
--- a/strings/tokenize.ha
+++ b/strings/tokenize.ha
@@ -95,16 +95,16 @@ fn tokenize_test(
const p = peek_token(&tok) as str;
const n = next_token(&tok) as str;
- assert(p == n, testcase);
- assert(n == want, testcase);
+ assert(p == n);
+ assert(n == want);
};
if (n >= iters) {
return tok;
};
- assert(peek_token(&tok) is done, testcase);
- assert(next_token(&tok) is done, testcase);
+ assert(peek_token(&tok) is done);
+ assert(next_token(&tok) is done);
return tok;
};
--
2.47.1
> - assert(equal(p, n), testcase);
> - assert(equal(n, want), testcase);
> + assert(equal(p, n));
> + assert(equal(n, want));
This is the only use of the testcase str. I think you can remove it from
the fn args and make it a comment, so
tokenize_test(&tokenize, "simple case", [1, 2, 0, 3, 4], [0], [
becomes
// simple case
tokenize_test(&tokenize, [1, 2, 0, 3, 4], [0], [
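
For the prototype itself, something along these lines -- hand-written
sketch only; apart from testcase, the parameter names here are from
memory rather than copied out of tokenize.ha:

// Sketch, not the real prototype -- the point is just that the
// testcase: str parameter disappears from the signature entirely.
fn tokenize_test(
	testfn: *fn(in: []u8, delim: []u8) tokenizer,
	in: []u8,
	delim: []u8,
	tokens: [][]u8,
	iters: size,
) tokenizer = {
	// ...body as in your patch, with the message-less asserts...
	abort(); // placeholder in this sketch
};

The strings/tokenize.ha counterpart would get the same treatment, with
str in place of []u8.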