Merge plainParser into mfm
This commit is contained in:
parent ca26edbfce
commit 98795aad9a
3 changed files with 39 additions and 25 deletions
@@ -1,4 +1,4 @@
-import parser, { plainParser } from './parser';
+import parser from './parser';
 import { MfmForest } from './types';
 import { normalize } from './normalize';
 
@@ -7,6 +7,6 @@ export default (source: string, plainText = false): MfmForest => {
 		return null;
 	}
 
-	const raw = plainText ? plainParser.root.tryParse(source) : parser.root.tryParse(source) as MfmForest;
+	const raw = plainText ? parser.plain.tryParse(source) : parser.root.tryParse(source) as MfmForest;
 	return normalize(raw);
 };
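The only behavioral change at the entry point is which grammar rule gets handed to tryParse. A minimal usage sketch of the exported function above, assuming it is imported as analyze the way the tests do (the import path is illustrative, not taken from this commit):

    import analyze from './parse'; // hypothetical path; the tests refer to this export as analyze

    const full = analyze('foo **bar** :baz:');        // full MFM grammar via parser.root
    const plain = analyze('foo **bar** :baz:', true); // restricted grammar via parser.plain (emoji + text only)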
@@ -28,29 +28,6 @@ const newline = P((input, i) => {
 	}
 });
 
-export const plainParser = P.createLanguage({
-	root: r => P.alt(
-		r.emoji,
-		r.text
-	).atLeast(1),
-
-	text: () => P.any.map(x => createLeaf('text', { text: x })),
-
-	//#region Emoji
-	emoji: r =>
-		P.alt(
-			P.regexp(/:([a-z0-9_+-]+):/i, 1)
-				.map(x => createLeaf('emoji', {
-					name: x
-				})),
-			P.regexp(emojiRegex)
-				.map(x => createLeaf('emoji', {
-					emoji: x
-				})),
-		),
-	//#endregion
-});
-
 const mfm = P.createLanguage({
 	root: r => P.alt(
 		r.big,

@@ -78,6 +55,11 @@ const mfm = P.createLanguage({
 		r.text
 	).atLeast(1),
 
+	plain: r => P.alt(
+		r.emoji,
+		r.text
+	).atLeast(1),
+
 	text: () => P.any.map(x => createLeaf('text', { text: x })),
 
 	//#region Big
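For context, the pattern this commit adopts is a single P.createLanguage() grammar that exposes both a full root rule and a restricted plain rule sharing the same sub-rules, instead of keeping a second standalone parser. The following self-contained sketch illustrates that pattern with a deliberately simplified grammar (bold/emoji/text only; leaf, lang and parse are illustrative names, not the project's actual code):

    import * as P from 'parsimmon';

    // Simplified stand-in for createLeaf(); the real code builds MfmForest nodes.
    const leaf = (type: string, props: Record<string, string>) => ({ type, props });

    const lang = P.createLanguage({
    	// Full grammar: every syntax element.
    	root: r => P.alt(r.bold, r.emoji, r.text).atLeast(1),

    	// Restricted grammar, playing the role of the removed plainParser:
    	// only emoji and raw text, reusing the same sub-rules as root.
    	plain: r => P.alt(r.emoji, r.text).atLeast(1),

    	bold: () => P.regexp(/\*\*([^*]+)\*\*/, 1).map(x => leaf('bold', { text: x })),
    	emoji: () => P.regexp(/:([a-z0-9_+-]+):/i, 1).map(x => leaf('emoji', { name: x })),
    	text: () => P.any.map(x => leaf('text', { text: x })),
    });

    // The caller selects the entry rule, mirroring the change in the first file above.
    const parse = (source: string, plainText = false) =>
    	(plainText ? lang.plain : lang.root).tryParse(source);

    console.log(parse('**hi** :wave:', true));
    // '**hi** ' comes back as per-character text leaves; only ':wave:' is recognized as an emoji.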
test/mfm.ts (32 changed lines)
@@ -1091,6 +1091,38 @@ describe('MFM', () => {
 		});
 	});
 
+	describe('plainText', () => {
+		it('text', () => {
+			const tokens = analyze('foo', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo'),
+			]);
+		});
+
+		it('emoji', () => {
+			const tokens = analyze(':foo:', true);
+			assert.deepStrictEqual(tokens, [
+				leaf('emoji', { name: 'foo' })
+			]);
+		});
+
+		it('emoji in text', () => {
+			const tokens = analyze('foo:bar:baz', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo'),
+				leaf('emoji', { name: 'bar' }),
+				text('baz'),
+			]);
+		});
+
+		it('disallow other syntax', () => {
+			const tokens = analyze('foo **bar** baz', true);
+			assert.deepStrictEqual(tokens, [
+				text('foo **bar** baz'),
+			]);
+		});
+	});
+
 	describe('toHtml', () => {
 		it('br', () => {
 			const input = 'foo\nbar\nbaz';
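The plainText tests expect a single text('foo') node even though the text rule consumes one character at a time, which lines up with the parse entry point calling normalize(raw) on the result. A rough sketch of that kind of merging step, assuming normalize folds adjacent text leaves together (Leaf and concatTextLeaves are hypothetical names, not this project's code):

    type Leaf = { type: string; props: { text?: string; name?: string } };

    // Merge runs of adjacent 'text' leaves into one leaf, so per-character output
    // such as 'f', 'o', 'o' becomes a single { type: 'text', props: { text: 'foo' } }.
    function concatTextLeaves(leaves: Leaf[]): Leaf[] {
    	const out: Leaf[] = [];
    	for (const leaf of leaves) {
    		const prev = out[out.length - 1];
    		if (leaf.type === 'text' && prev !== undefined && prev.type === 'text') {
    			prev.props.text = (prev.props.text ?? '') + (leaf.props.text ?? '');
    		} else {
    			out.push({ ...leaf, props: { ...leaf.props } });
    		}
    	}
    	return out;
    }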