author      Aki <please@ignore.pl>  2024-07-19 23:55:23 +0200
committer   Aki <please@ignore.pl>  2024-07-19 23:55:23 +0200
commit      87c8a5e8955a57e6365adb4aa64575308b17c47d (patch)
tree        96bac7ab83434cbd21dab9e58921e45c2cb7d830 /spec
parent      d6189e60851ff347c8c11812025232621f54096e (diff)
download    headers-87c8a5e8955a57e6365adb4aa64575308b17c47d.zip
            headers-87c8a5e8955a57e6365adb4aa64575308b17c47d.tar.gz
            headers-87c8a5e8955a57e6365adb4aa64575308b17c47d.tar.bz2
Use multiset-like data structure for storing headers
This looks like overkill because it is. The space and time complexity of the multiset structure are rather bad. At this point it stays mostly because of the sunk cost fallacy. It will provide an OK abstraction layer on which to build the rest of this forsaken project.
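
Going only by the behaviour pinned down in the new spec below, the abstraction is an insertion-ordered multiset that also remembers what it used to contain. A rough usage sketch follows; the header names are made up for illustration, and only the methods exercised by the spec are shown:

local multiset = require "headers.multiset"

local set = multiset.new()
set:add "Accept"                  -- hypothetical header names, not from the spec
set:add "Content-Type"
print(#set, set:size())           -- 2  2  (operator and method agree)
print(set:has "Accept")           -- true
set:remove "Accept"
print(set:has "Accept")           -- false: no longer present
print(set:had "Accept")           -- true: past membership is remembered
for value in set:all() do         -- insertion order, skipping removed values
	print(value)              -- prints only "Content-Type"
end
local copy = set:clone()          -- independent copy; changes to it do not affect set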
Diffstat (limited to 'spec')
-rw-r--r--  spec/multiset_spec.lua  168
-rw-r--r--  spec/parser_spec.lua     21
2 files changed, 186 insertions(+), 3 deletions(-)
diff --git a/spec/multiset_spec.lua b/spec/multiset_spec.lua
new file mode 100644
index 0000000..7639066
--- /dev/null
+++ b/spec/multiset_spec.lua
@@ -0,0 +1,168 @@
+local multiset = require "headers.multiset"
+
+
+describe("Multisets can be created", function()
+ it("without errors", function()
+ assert.has_no.errors(function()
+ local _ = multiset.new()
+ end)
+ end)
+end)
+
+
+describe("Size of multiset", function()
+ local set
+
+ setup(function()
+ set = multiset.new()
+ end)
+
+ it("can be checked with method", function()
+ assert.is_not_nil(set:size())
+ end)
+
+ it("can be checked with operator", function()
+ assert.is_not_nil(#set)
+ end)
+
+ it("is zero after creation", function()
+ assert.are.equal(0, #set)
+ end)
+end)
+
+
+describe("Values", function()
+ randomize(false)
+ local set
+
+ setup(function()
+ set = multiset.new()
+ end)
+
+ it("are not present before insertion", function()
+ assert.is_false(set:has"will-be-removed")
+ assert.is_false(set:has"will-stay")
+ assert.is_false(set:has"never-inserted")
+ end)
+
+ it("were not tracked before insertion", function()
+ assert.is_false(set:had"will-be-removed")
+ assert.is_false(set:had"will-stay")
+ assert.is_false(set:had"never-inserted")
+ end)
+
+ it("can be inserted", function()
+ assert.has_no.errors(function()
+ set:add"will-be-removed"
+ set:add"will-stay"
+ end)
+ end)
+
+ it("are present after insertion", function()
+ assert.is_true(set:has"will-be-removed")
+ assert.is_true(set:has"will-stay")
+ assert.is_false(set:has"never-inserted")
+ end)
+
+ it("are tracked after insertion", function()
+ assert.is_true(set:had"will-be-removed")
+ assert.is_true(set:had"will-stay")
+ assert.is_false(set:had"never-inserted")
+ end)
+
+ it("are counted in size", function()
+ assert.are.equal(2, #set)
+ end)
+
+ it("can be removed", function()
+ assert.has_no.errors(function()
+ set:remove"will-be-removed"
+ end)
+ end)
+
+ it("are no longer present after removal", function()
+ assert.is_false(set:has"will-be-removed")
+ assert.is_true(set:has"will-stay")
+ assert.is_false(set:has"never-inserted")
+ end)
+
+ it("are still tracked after removal", function()
+ assert.is_true(set:had"will-be-removed")
+ assert.is_true(set:had"will-stay")
+ assert.is_false(set:had"never-inserted")
+ end)
+
+ it("are not counted after removal", function()
+ assert.are.equal(1, #set)
+ end)
+end)
+
+
+describe("Iterator", function()
+ randomize(false)
+ local set
+
+ setup(function()
+ set = multiset.new()
+ set:add"a"
+ set:add"b"
+ set:add"c"
+ set:add"d"
+ end)
+
+ it("preserves order", function()
+ local items = {}
+ for value in set:all() do
+ table.insert(items, value)
+ end
+ assert.are.same({"a", "b", "c", "d"}, items)
+ end)
+
+ it("does not include past values", function()
+ set:remove"c"
+ set:add"e"
+ local items = {}
+ for value in set:all() do
+ table.insert(items, value)
+ end
+ assert.are.same({"a", "b", "d", "e"}, items)
+ end)
+
+ it("keeps the order of re-added values", function()
+ set:add"c"
+ local items = {}
+ for value in set:all() do
+ table.insert(items, value)
+ end
+ assert.are.same({"a", "b", "c", "d", "e"}, items)
+ end)
+end)
+
+
+describe("Clones", function()
+ randomize(false)
+ local a
+ local b
+
+ setup(function()
+ a = multiset.new()
+ a:add"set-in-a"
+ end)
+
+ it("can be created", function()
+ assert.has_no.errors(function()
+ b = a:clone()
+ end)
+ assert.is_not_nil(b)
+ end)
+
+ it("do not interfere with their original", function()
+ assert.is_true(a:has"set-in-a")
+ assert.is_true(b:has"set-in-a")
+ assert.is_false(a:has"set-in-b")
+ assert.is_false(b:has"set-in-b")
+ b:add"set-in-b"
+ assert.is_false(a:has"set-in-b")
+ assert.is_true(b:has"set-in-b")
+ end)
+end)
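
The spec above constrains the structure more than it may first appear: had() must keep answering true after removal, and a value that is removed and later re-added shows up in all() at its original position rather than at the end. One way to get that behaviour, and a plausible source of the "rather bad" space and time complexity admitted in the commit message, is to never forget a value once it has been seen. The following is only a sketch that matches the spec's intent, not the actual headers.multiset implementation, and it assumes Lua 5.2+ so the __len metamethod can back the # operator:

local multiset = {}
multiset.__index = multiset

function multiset.new()
	-- order remembers every value ever added, in first-insertion order;
	-- count holds the current multiplicity (0 after removal, but the key stays)
	return setmetatable({order = {}, count = {}, total = 0}, multiset)
end

function multiset:add(value)
	if self.count[value] == nil then
		table.insert(self.order, value)
	end
	self.count[value] = (self.count[value] or 0) + 1
	self.total = self.total + 1
end

function multiset:remove(value)
	if self:has(value) then
		self.count[value] = self.count[value] - 1
		self.total = self.total - 1
	end
end

function multiset:has(value)
	return (self.count[value] or 0) > 0
end

function multiset:had(value)
	return self.count[value] ~= nil
end

function multiset:size()
	return self.total
end

multiset.__len = multiset.size

function multiset:all()
	local i = 0
	return function()
		repeat -- skip values that are remembered but currently absent
			i = i + 1
		until self.order[i] == nil or self:has(self.order[i])
		return self.order[i]
	end
end

function multiset:clone()
	local copy = multiset.new()
	for i, value in ipairs(self.order) do
		copy.order[i] = value
	end
	for value, n in pairs(self.count) do
		copy.count[value] = n
	end
	copy.total = self.total
	return copy
end

return multiset

Because order only ever grows, iteration and memory scale with everything ever inserted, not with the current contents, which is consistent with the complexity caveat in the commit message.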
diff --git a/spec/parser_spec.lua b/spec/parser_spec.lua
index 78ee8b8..53f86a1 100644
--- a/spec/parser_spec.lua
+++ b/spec/parser_spec.lua
@@ -1,16 +1,31 @@
-local parse = require "headers.parser"
+local parser = require "headers.parser"
describe("Scheme", function()
+ local headers
+
+ setup(function()
+ headers = parser.new()
+ end)
+
it("version one is supported", function()
assert.has_no.errors(function()
- parse [[scheme "headers/1"]]
+ headers:parse [[scheme "headers/1"]]
end)
end)
it("is not required", function()
assert.has_no.errors(function()
- parse ""
+ headers:parse ""
end)
end)
end)
+
+
+describe("Standards (tags)", function() -- TODO: Streamline the naming convention standards/tags/aliases?
+ local headers
+
+ setup(function()
+ headers = parser.new()
+ end)
+end)
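
The parser_spec changes above reflect headers.parser now exposing constructor-based parsers instead of a bare parse function. Minimal usage, mirroring only what the updated spec exercises (nothing beyond it is implied):

local parser = require "headers.parser"

local headers = parser.new()            -- independent parser instance, as in the spec's setup
headers:parse [[scheme "headers/1"]]    -- scheme declaration, version one
headers:parse ""                        -- an empty document also parses without errors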