* feat(tools): add seed/solution restore script * chore(curriculum): remove empty sections' markers * chore(curriculum): add seed + solution to Chinese * chore: remove old formatter * fix: update getChallenges parse translated challenges separately, without reference to the source * chore(curriculum): add dashedName to English * chore(curriculum): add dashedName to Chinese * refactor: remove unused challenge property 'name' * fix: relax dashedName requirement * fix: stray tag Remove stray `pre` tag from challenge file. Signed-off-by: nhcarrigan <nhcarrigan@gmail.com> Co-authored-by: nhcarrigan <nhcarrigan@gmail.com>
1.8 KiB
1.8 KiB
id, title, challengeType, forumTopicId, dashedName
id | title | challengeType | forumTopicId | dashedName |
---|---|---|---|---|
599d15309e88c813a40baf58 | Entropy | 5 | 302254 | entropy |
--description--
Calculate the Shannon entropy H of a given input string.
Given the discrete random variable X
that is a string of N
"symbols" (total characters) consisting of n
different characters (n=2 for binary), the Shannon entropy of X in bits/symbol is:
H_2(X) = -\sum_{i=1}^n \frac{count_i}{N} \log_2 \left(\frac{count_i}{N}\right)
where count_i
is the count of character n_i
.
--hints--
entropy
should be a function.
assert(typeof entropy === 'function');
entropy("0")
should return 0
assert.equal(entropy('0'), 0);
entropy("01")
should return 1
assert.equal(entropy('01'), 1);
entropy("0123")
should return 2
assert.equal(entropy('0123'), 2);
entropy("01234567")
should return 3
assert.equal(entropy('01234567'), 3);
entropy("0123456789abcdef")
should return 4
assert.equal(entropy('0123456789abcdef'), 4);
entropy("1223334444")
should return 1.8464393446710154
assert.equal(entropy('1223334444'), 1.8464393446710154);
--seed--
--seed-contents--
function entropy(s) {
  // TODO: return the Shannon entropy of s in bits per symbol.
}
--solutions--
/**
 * Compute the Shannon entropy of a string in bits per symbol.
 *
 * H(s) = -Σ (count_i / N) * log2(count_i / N), where N is the string
 * length and count_i the number of occurrences of the i-th distinct
 * character.
 *
 * @param {string} s - Input string (may be empty; empty yields 0).
 * @returns {number} Entropy in bits per symbol.
 */
function entropy(s) {
  const length = s.length;

  // Tally occurrences of each character (UTF-16 code unit, matching split('')).
  const counts = new Map();
  for (const ch of s.split('')) {
    counts.set(ch, (counts.get(ch) || 0) + 1);
  }

  // Accumulate -p * log2(p) per distinct character. log2 is derived from
  // natural logs exactly as in the reference implementation so the
  // floating-point result is bit-identical.
  let bitsPerSymbol = 0;
  for (const occurrences of counts.values()) {
    const p = occurrences / length;
    bitsPerSymbol -= p * Math.log(p) / Math.log(2);
  }
  return bitsPerSymbol;
}