-- Unit tests for [[Module:User:Erutuon/grc]], run with the framework
-- in [[Module:UnitTests]].
local tests = require("Module:UnitTests")

local m_grc = require("Module:User:Erutuon/grc")
local tokenize = m_grc.tokenize
local get_participle_information = m_grc.get_participle_information

-- tag formats Greek text for display; toNFC composes characters and
-- combining diacritics so that strings compare consistently.
local tag = require("Module:grc-utilities").tag
local compose = mw.ustring.toNFC
-- Joins the tokens of a word into a comma-separated string, for
-- comparison against the expected tokenizations below.
local function displayTokenization(example, isNoun)
	return table.concat(tokenize(example, isNoun), ", ")
end
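-- For instance, the test data below expects displayTokenization("οἷαι")
-- to yield "οἷ, αι", with each diphthong kept as a single token.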
-- Compares the actual tokenization of a word with the expected one.
function tests:check_tokenize(example, expected, isNoun)
	self:equals(
		tag(example) .. (isNoun and " (noun)" or ""), -- test name
		compose(displayTokenization(example, isNoun)), -- actual
		compose(expected), -- expected
		{ display = tag, show_difference = true }
	)
end
-- Formats a set of participle information for display, substituting
-- "?" for any value that was not determined.
local function show_participle_information(decl, feminine, neuter, lemma)
	decl, feminine, neuter, lemma = decl or "?", feminine or "?", neuter or "?", lemma or "?"
	return compose(tag(feminine) .. ", " .. tag(neuter) .. " (" .. decl
		.. ", " .. tag(lemma) .. ")")
end
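-- For instance, the first data row below should display (apart from
-- the markup added by tag) as "πῑ́πτουσᾰ, πῖπτον (1&3, πῑ́πτω)".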
-- Shared options for the participle tests. No display function is
-- needed here, because show_participle_information already applies tag.
local options = { show_difference = true }
-- Checks the feminine, neuter, declension class, and lemma that
-- get_participle_information derives from a masculine participle.
-- contraction_vowel ("a", "e", or "o") marks participles of contracted verbs.
function tests:check_get_participle_information(example, contraction_vowel, expected)
	self:equals(
		tag(example),
		show_participle_information(get_participle_information(example, contraction_vowel)),
		show_participle_information(unpack(expected)),
		options
	)
end
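-- iterate, from [[Module:UnitTests]], calls the named check method once
-- per row of the examples table, passing the row's entries as arguments.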
function tests:test_get_participle_information()
	-- Each row: participle, contraction vowel (or nil), and expected
	-- { declension, feminine, neuter, lemma }; nil marks values that
	-- the function is not expected to determine.
	local examples = {
		{ "πῑ́πτων", nil, { "1&3", "πῑ́πτουσᾰ", "πῖπτον", "πῑ́πτω" } },
		{ "λᾰβών", nil, { "1&3", "λᾰβοῦσᾰ", "λᾰβόν", nil } },
		{ "τῑμῶν", "a", { "1&3", "τῑμῶσᾰ", "τῑμῶν", "τῑμᾰ́ω" } },
		{ "ποιῶν", "e", { "1&3", "ποιοῦσᾰ", "ποιοῦν", "ποιέω" } },
		{ "δηλῶν", "o", { "1&3", "δηλοῦσᾰ", "δηλοῦν", "δηλόω" } },
		{ "εἰδώς", nil, { "1&3", "εἰδυῖᾰ", "εἰδός", nil } },
		{ "λεγόμενος", nil, { "1&2", "λεγομένη", "λεγόμενον", "λέγω" } },
		{ "θείς", nil, { "1&3", "θεῖσᾰ", "θέν", nil } },
		{ "ᾰ̓ξῐώσᾱς", nil, { "1&3", "ᾰ̓ξῐώσᾱσᾰ", "ᾰ̓ξῐῶσᾰν", nil } },
	}
	self:iterate(examples, "check_get_participle_information")
end
function tests:test_tokenize()
	-- Each row: word, expected comma-separated tokens, and (optionally)
	-- true if the word should be tokenized as a noun.
	local examples = {
		{ "Λεωνίδης", "Λ, ε, ω, ν, ί, δ, η, ς", true },
		{ "ἀληθέι", "ἀ, λ, η, θ, έ, ι" },
		{ "πόλεως", "π, ό, λ, εω, ς", true },
		{ "πόλεων", "π, ό, λ, εω, ν", true },
		{ "οἷαι", "οἷ, αι" },
		{ "ΟἿΑΙ", "ΟἿ, ΑΙ" },
		{ "Αἰσχύλος", "Αἰ, σχ, ύ, λ, ο, ς" },
		{ "ἀναῡ̈τέω", "ἀ, ν, α, ῡ̈, τ, έ, ω" },
		{ "τούτῳ", "τ, ού, τ, ῳ" },
	}
	self:iterate(examples, "check_tokenize")
end
return tests