Mirror of https://github.com/We-Dont-Byte/Mind_Reader.git (synced 2024-11-15 03:35:59 +00:00)

Merge branch 'master' into josiahMoses

This commit is contained in:
commit e3ded114f7

package.json (18 lines changed)
@@ -46,7 +46,18 @@
            {
                "command": "mind-reader.selectTheme",
                "title": "Select Theme"
            },
            {
                "command": "mind-reader.runLineContext",
                "title": "Run Line Context"
            },
            {
                "command": "mind-reader.runCursorContext",
                "title": "Run Cursor Context"
            }
        ],
        "keybindings": [
            {
@@ -212,7 +223,7 @@
                    "LEGO® Education SPIKE™ Prime Set (45678)"
                ]
            },
            "mindreader.screenReader": {
            "mindreader.reader.screenReader": {
                "type": "string",
                "description": "Specifies which screen reader to optimize for.",
                "default": "NVDA",
@@ -227,6 +238,11 @@
                    "Apple VoiceOver (macOS)"
                ]
            },
            "mindreader.reader.contextWindow": {
                "type": "number",
                "description": "The number of words around the cursor to use when reading the cursor context",
                "default": 1
            },
            "mindreader.connection.connectAutomatically": {
                "type": "boolean",
                "description": "Specifies whether to try to automatically detect and communicate with a connected Hub.",
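These contributed settings are read back inside the extension (see runCursorContext in src/commands.ts below). As a minimal sketch of how they would typically be accessed through the VS Code configuration API — the helper name getReaderConfig is illustrative, not part of this commit:

import * as vscode from 'vscode';

// Illustrative helper (not in this diff): read the reader settings contributed
// in package.json, falling back to their declared defaults.
function getReaderConfig(): { screenReader: string, contextWindow: number } {
    const config = vscode.workspace.getConfiguration('mindreader');
    return {
        screenReader: config.get<string>('reader.screenReader', 'NVDA'),
        contextWindow: config.get<number>('reader.contextWindow', 1),
    };
}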
src/commands.ts (128 lines changed)

@@ -1,4 +1,5 @@
import * as vscode from 'vscode';
import * as pl from './pylex';

/**
 * @type {Object} Command // Command to register with the VS Code Extension API
@@ -108,6 +109,14 @@ const commands: Command[] = [
    {
        name: 'mind-reader.navigateForward',
        callback: () => vscode.commands.executeCommand('workbench.action.navigateForward'),
    },
    {
        name: 'mind-reader.runLineContext',
        callback: runLineContext,
    },
    {
        name: 'mind-reader.runCursorContext',
        callback: runCursorContext
    }
];
@@ -137,4 +146,123 @@ function resetEditorScale(): void {
    vscode.commands.executeCommand('workbench.action.zoomReset');
}

function runLineContext(): void {
    let editor = vscode.window.activeTextEditor;
    if (editor) {
        // current text and line number
        let editorText = editor.document.getText();
        let line = editor.selection.active.line;

        // get tab info settings
        let size = parseInt(editor.options.tabSize as string);
        let hard = !editor.options.insertSpaces;

        // initialize parser
        let parser = new pl.Parser(editorText, {size, hard});
        parser.parse();

        let context = parser.context(line);

        // build text
        let contentString = createContextString(context, line);
        vscode.window.showInformationMessage(contentString);
    } else {
        vscode.window.showErrorMessage('No document currently active');
    }
}

function createContextString(context: pl.LexNode[], line: number): string {
    if (context.length < 1) {
        throw new Error('Cannot create context string for empty context');
    }

    let contextString = 'Line ' + (line+1); // 1 based
    if (context[0].token && context[0].token.attr) {
        contextString += ': ' + context[0].token.type.toString() + ' ' + context[0].token.attr.toString();
    }
    for (let i = 1; i < context.length; i++) {
        let node = context[i];
        if (node.label === 'root') {
            // root
            contextString += ' in the Document Root';
            continue;
        }

        if (node.token!.type !== pl.PylexSymbol.EMPTY &&
            node.token!.type !== pl.PylexSymbol.INDENT) {
            contextString += ' inside ' + node.token!.type.toString();
            if (node.token!.attr) {
                contextString += ' ' + node.token!.attr.toString();
            }
        }
    }
    return contextString;
}

// find up to `n` words around the cursor, where `n` is
// the value of `#mindreader.reader.contextWindow`
function runCursorContext(): void {
    let editor = vscode.window.activeTextEditor;
    if (!editor) {
        vscode.window.showErrorMessage("RunCursorContext: No Active Editor");
        return;
    }

    const cursorPos: vscode.Position = editor.selection.active;
    const text: string = editor.document.lineAt(cursorPos).text;
    const windowSize: number = vscode.workspace.getConfiguration('mindreader').get('reader.contextWindow')!;

    let trimmedText = text.trimStart(); // trim leading whitespace
    let leadingWS = text.length - trimmedText.length; // # of characters of leading whitespace
    trimmedText = trimmedText.trimEnd(); // trim trailing whitespace
    let pos = leadingWS;
    let maxPos = text.length;

    // clamp cursor start/end to new range
    let col = cursorPos.character; // effective column of the cursor position
    if (col < leadingWS) {
        // move effective start to first non-whitespace character in the line
        col = leadingWS;
    } else if (col > leadingWS + trimmedText.length - 1) {
        // move effective end to last non-whitespace character in the line
        col = leadingWS + trimmedText.length - 1;
    }

    // generate list of space-separated words with range data (start, end)
    // TODO: finding the user's position could be done in the same pass
    let spaceWords: {word: string, start: number, end: number}[] = [];
    while (pos < maxPos && trimmedText.length > 0) {
        let word = trimmedText.replace(/ .*/, '');
        spaceWords.push({word, start: pos, end: pos + word.length});

        // remove the processed word from the trimmed text
        const oldText = trimmedText;
        trimmedText = trimmedText.replace(/[^ ]+/, '').trimStart();

        // update pos to the start of the next word
        pos += oldText.length - trimmedText.length;
    }

    // find the word the user is in
    let contextStart: number = -1, contextEnd: number = -1;
    for (let i = 0; i < spaceWords.length; i++) {
        if (col >= spaceWords[i].start && col <= spaceWords[i].end) {
            // found the word
            contextStart = Math.max(0, i - windowSize); // clamp start index
            contextEnd = Math.min(spaceWords.length, i + windowSize + 1); // clamp end index

            // construct cursor context string
            let contextString = '';
            for (let i = contextStart; i < contextEnd; i++) {
                contextString += spaceWords[i].word + ' ';
            }

            // output cursor context string
            vscode.window.showInformationMessage(contextString);

            return;
        }
    }
}

export default commands;
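For context, a minimal sketch (assumed, not shown in this diff) of how a commands array like this is typically registered with the VS Code Extension API during activation:

import * as vscode from 'vscode';
import commands from './commands';

// Illustrative activate() (not part of this commit): register each entry of
// the exported commands array and track the resulting disposables.
export function activate(context: vscode.ExtensionContext): void {
    for (const command of commands) {
        context.subscriptions.push(
            vscode.commands.registerCommand(command.name, command.callback)
        );
    }
}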
@@ -4,4 +4,4 @@ export {default as LineToken} from './token';
export {default as Lexer} from './lexer';
export {default as LexNode} from './node';
export {TabInfo as TabInfo} from './token';
export {Symbol as PylexSymbol} from './token';
@@ -159,14 +159,15 @@ export default class Lexer {
        }
        // No rules matched

        // Skip this line if it is whitespace, comment, or empty
        // TODO: move to rules
        if (/^\s*(#.*)?$/.test(line)) {
            this.pos++;
            continue;
            // "empty" line
            token = new LineToken(Symbol.EMPTY, this.pos, 999999);
        } else {
            // This is an INDENT token
            token = new LineToken(Symbol.INDENT, this.pos, indent);
        }

        // This is an INDENT token
        token = new LineToken(Symbol.INDENT, this.pos, indent);
        this._currToken = token;
        this.pos++;
        return this.currToken();
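The practical effect of this change, as a sketch assuming the constructor and token values shown in the test changes below (not part of the diff itself): blank and comment-only lines now yield EMPTY tokens instead of being skipped by the lexer.

import Lexer from './lexer';

// Hypothetical input with a blank line between two statements.
const lexer = new Lexer('x = 1\n\nprint(x)');

lexer.currToken(); // INDENT token for line 0
lexer.next();      // EMPTY token for line 1 (previously this line was skipped)
lexer.next();      // INDENT token for line 2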
@@ -2,6 +2,8 @@ import * as vscode from 'vscode';

import LineToken from './token';

/* TODO: make accessing children and parent less tedious */
/* TODO: 'root.children()![i])' */
/**
 * A node in a Parse tree.
 */

@@ -46,8 +48,9 @@ export default class LexNode extends vscode.TreeItem {
     * Adopt child nodes.
     *
     * @param `child` Array of nodes to adopt.
     * @returns an updated version of itself
     */
    adopt(children: LexNode[]): void {
    adopt(children: LexNode[]): LexNode {
        let parentedChildren = children.map(c => new LexNode(
            c.label,
            c.collapsibleState,

@@ -64,6 +67,7 @@ export default class LexNode extends vscode.TreeItem {
            // No....
            this._children = parentedChildren;
        }
        return this;
    }

    /**
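Since adopt() now returns the updated node rather than void, a parent can be built and used in one expression. A small illustration with hypothetical nodes, constructed the same way the parser and tests in this diff construct them:

import * as vscode from 'vscode';
import LexNode from './node';
import LineToken, { Symbol } from './token';

// Hypothetical parent/child pair (not part of this commit).
const child = new LexNode('INDENT', vscode.TreeItemCollapsibleState.None,
                          new LineToken(Symbol.INDENT, 1, 1));
const parent = new LexNode('if ready', vscode.TreeItemCollapsibleState.None,
                           new LineToken(Symbol.IF, 0, 0, 'ready'));

// adopt() returns the parent itself, so the result can be assigned or chained.
const ifNode = parent.adopt([child]);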
@@ -3,6 +3,7 @@ import * as vscode from 'vscode';
import { EOFTOKEN, Symbol, TabInfo } from './token';
import Lexer from './lexer';
import LexNode from './node';
/* TODO: update design doc */

/**
 * A parse tree generator

@@ -71,8 +72,20 @@ export default class Parser {
            // go up 1 level of recursion at a time to unravel properly
            this.currIndent--;
            return children;
        } else if (this.lexer.currToken().type === Symbol.INDENT) {
        }

        if (this.lexer.currToken().type === Symbol.INDENT ||
            this.lexer.currToken().type === Symbol.EMPTY) {
            const label = this.lexer.currToken().type;
            // regular code, advance and stay in same block
            children.push(new LexNode(
                label,
                vscode.TreeItemCollapsibleState.None,
                this.lexer.currToken(),
                null,
                parent)
            );

            this.lexer.next();
            continue;
        } else {

@@ -105,29 +118,28 @@ export default class Parser {
     * @param `lineNumber` The line number to query context for.
     * @return An array of LexNodes for the root path containing `lineNumber`
     */
    context(lineNumber: number): LexNode[] {
        if (!this.root.children()) {
            return [];
    context(lineNumber: number, root?: LexNode): LexNode[] {
        if (!root) {
            root = this.root;
        }

        // Returns the LexNode that is the parent
        // of the queried line number
        let find = (root: LexNode): LexNode | undefined => {
            let prevChild: LexNode;
            for (var child of root.children()!) {
                if (lineNumber < child.token!.linenr) {
                    if (prevChild!.children()) {
                        return find(prevChild!);
                    } else {
                        return prevChild!;
                    }
                } else {
                    prevChild = child;
        // is this the target?
        if (root.token && root.token!.linenr === lineNumber) {
            // match
            return root.rootPath();
        }

        if (root.children()) {
            // recursive call
            for (let i = 0; i < root.children()!.length; i++) {
                let ctx = this.context(lineNumber, root.children()![i]);
                if (ctx.length > 0) {
                    // a rootpath was returned
                    return ctx;
                }
            }
        };

        let target = find(this.root);
        return target!.rootPath();
        }
        // no matches
        return [];
    }
}
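A minimal usage sketch of the rewritten context() (assumed, based on how runLineContext calls it above; the sample program text is purely illustrative):

import * as pl from './pylex';

// Illustrative only: parse a small program and query the context of line 2
// (0-based), i.e. the statement nested inside the while and if blocks.
const source = [
    'if ready:',
    '    while waiting:',
    '        poll()',
].join('\n');

const parser = new pl.Parser(source, { size: 4, hard: false });
parser.parse();

// The root path from the node containing line 2 up to the document root.
const context: pl.LexNode[] = parser.context(2);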
@@ -17,6 +17,7 @@ export enum Symbol {
    FINALLY = "finally",
    WITH = "with",
    INDENT = "INDENT", // Indent token, default if not EOF, only contains indent information
    EMPTY = "EMPTY", // empty line, used only to associate with the previous line
    EOF = "EOF"
}
@@ -17,13 +17,18 @@ suite('Lexer Test Suite', () => {
    });

    test('Undefined', () => {
        let l: Lexer = new Lexer('');
        let l: Lexer = new Lexer(undefined);
        assert.deepStrictEqual(l.currToken(), EOFTOKEN);
    });

    test('Whitespace', () => {
        let l: Lexer = new Lexer(' \t\t'.repeat(4).repeat(4));
        assert.deepStrictEqual(l.currToken(), EOFTOKEN);
        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.EMPTY, 0, 999999));
    });

    test('Comment', () => {
        let l: Lexer = new Lexer('# ur mom eats toes');
        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.EMPTY, 0, 999999));
    });

    test('Non-Whitespace with no construct', () => {
@@ -120,98 +125,6 @@ suite('Lexer Test Suite', () => {
        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.IF, 0, 0, 'is_old()'));
    });

    test('next() ignores empty lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            '',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        l.next();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.INDENT, 2, 1));
    });

    test('retract() ignores empty lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            '',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        l.next();

        l.retract();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.IF, 0, 0, 'wurst_available()'));
    });

    test('next() ignores whitespace lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            ' \t \t ',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        l.next();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.INDENT, 2, 1));
    });

    test('retract() ignores whitespace lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            ' \t \t ',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        // Advance to end of input
        // Eliminates dependence on next()
        // skipping whitespace
        do {} while (l.next() !== EOFTOKEN);

        l.retract(); // retract past EOFTOKEn
        l.retract();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.IF, 0, 0, 'wurst_available()'));
    });

    test('next() ignores comment lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            ' \t # I hate testing \t',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        l.next();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.INDENT, 2, 1));
    });

    test('retract() ignores comment lines', () => {
        let lines: string[] = [
            'if wurst_available():',
            ' \t # \t',
            ' eat_wurst()'
        ];
        let l: Lexer = new Lexer(lines.join('\n'));

        // Advance to end of input
        // Eliminates dependence on next()
        // skipping comment
        do {} while (l.next() !== EOFTOKEN);

        l.retract(); // retract past EOFTOKEn
        l.retract();

        assert.deepStrictEqual(l.currToken(), new LineToken(Symbol.IF, 0, 0, 'wurst_available()'));
    });

    test('next() out of range', () => {
        let l: Lexer = new Lexer('foo = zaboomafoo');
        l.next();
@@ -1,51 +1,61 @@
import * as assert from 'assert';
import * as vscode from 'vscode';
import { after } from 'mocha';
import { deparent, root } from '../../util';

import Parser from '../../../pylex/parser';
import LexNode from '../../../pylex/node';
import LineToken from '../../../pylex/token';
import { Symbol } from '../../../pylex/token';

import { root,indent,empty } from '../../util';

/**
 * Test Descriptor
 */
type ParserTest = {
    name: string,
    input: string[],
    output: LexNode,
    name: string,    // short name for the test
    input: string[], // input lines for the test
    output: LexNode  // expected output. outputs are compared for token equality *only*
};

const tests: ParserTest[] = [
    { name: 'No Input', input: [], output: root(null) },
    { name: 'Single Empty Line', input: [''], output: root(null) },
    {
        name: 'No Input',
        input: [ ],
        output: root(null),
    },

    {
        name: 'Single line without construct',
        input: [ 'foo = "Yellow M&Ms make me angry >:(' ],
        output: root(null),
    },

    {
        name: 'Single line with construct',
        input: [ 'for x of y:' ],
        name: 'Single Whitespace Only Line',
        input: [' '],
        output: root([
            new LexNode(
                'for x of y',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.FOR, 0, 0, 'x of y')
            )
        ]),
            empty(0)
        ])
    },
    {
        name: 'Single Comment Only Line',
        input: ['# ur mom likes peas'],
        output: root([
            empty(0)
        ])
    },
    {
        name: 'Single Non-Control Line',
        input: ['my_age = 42'],
        output: root([
            indent(0, 0)
        ])
    },
    {
        name: 'Single Control Line',
        input: ['while True:'],
        output: root([
            new LexNode('', 0, new LineToken(Symbol.WHILE, 0, 0, 'True'))
        ])
    },

    {
        name: 'Sequential lines, without construct',
        input: [
            'bar = "Blue M&Ms make me happy <:)"',
            'reba = "A hard working gal"'
        ],
        output: root(null),
        output: root([indent(0,0), indent(1,0)]),
    },

    {
@@ -58,7 +68,9 @@ const tests: ParserTest[] = [
        output: root([
            new LexNode('if radioshack',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.IF, 0, 0, 'radioshack'))
                new LineToken(Symbol.IF, 0, 0, 'radioshack'),
                [indent(1, 1)]),
            indent(2, 0)
        ])
    },

@@ -70,9 +82,11 @@ const tests: ParserTest[] = [
            ' print radioshack.hours'
        ],
        output: root([
            indent(0, 0),
            new LexNode('if radioshack',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.IF, 1, 0, 'radioshack'))
                new LineToken(Symbol.IF, 1, 0, 'radioshack'),
                [indent(2, 1)])
        ])
    },
@@ -89,13 +103,13 @@ const tests: ParserTest[] = [
        output: root([
            new LexNode('if yummy',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.IF, 0, 0, 'yummy')),
                new LineToken(Symbol.IF, 0, 0, 'yummy'), [indent(1, 1)]),
            new LexNode('elif just_ok',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.ELIF, 2, 0, 'just_ok')),
                new LineToken(Symbol.ELIF, 2, 0, 'just_ok'), [indent(3, 1)]),
            new LexNode('else',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.ELSE, 4, 0)),
                new LineToken(Symbol.ELSE, 4, 0), [indent(5,1)]),
        ])
    },

@@ -113,8 +127,9 @@ const tests: ParserTest[] = [
                [
                    new LexNode('if in_my_tummy',
                        vscode.TreeItemCollapsibleState.None,
                        new LineToken(Symbol.IF, 1, 1, 'in_my_tummy'))
                ]
                        new LineToken(Symbol.IF, 1, 1, 'in_my_tummy'),
                        [indent(2, 2)])
                ],
            )
        ])
    },
@@ -135,12 +150,14 @@ const tests: ParserTest[] = [
                [
                    new LexNode('if in_my_tummy',
                        vscode.TreeItemCollapsibleState.None,
                        new LineToken(Symbol.IF, 1, 1, 'in_my_tummy'))
                        new LineToken(Symbol.IF, 1, 1, 'in_my_tummy'),
                        [indent(2, 2)])
                ]
            ),
            new LexNode('else',
            new LexNode('else',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.ELSE, 3, 0),
                [indent(4, 1)]
            )
        ])
    },
@@ -166,7 +183,8 @@ const tests: ParserTest[] = [
                        [
                            new LexNode('if looks_like_a_mummy',
                                vscode.TreeItemCollapsibleState.None,
                                new LineToken(Symbol.IF, 2, 2, 'looks_like_a_mummy'))
                                new LineToken(Symbol.IF, 2, 2, 'looks_like_a_mummy'),
                                [indent(3, 3)])
                        ]
                    )
                ]
@@ -174,12 +192,13 @@ const tests: ParserTest[] = [
            new LexNode('else',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.ELSE, 4, 0),
                [indent(5, 1)]
            )
        ])
    },

    {
        name: 'Doubly Nested Block, with multiple indent resets',
        name: 'Doubly Nested Block, with multiple indent resets > 1',
        input: [
            'if yummy:',
            ' if in_my_tummy:',
@@ -188,7 +207,7 @@ const tests: ParserTest[] = [
            ' else:',
            ' print("eek! a zombie!)',
            ' elif in_my_mouth:',
            ' print("ill be in my tummy soon!"',
            ' print("itll be in my tummy soon!"',
            'else:',
            ' print("Food is food...")'
        ],
@@ -203,25 +222,70 @@ const tests: ParserTest[] = [
                        [
                            new LexNode('if looks_like_a_mummy',
                                vscode.TreeItemCollapsibleState.None,
                                new LineToken(Symbol.IF, 2, 2, 'looks_like_a_mummy')),
                                new LineToken(Symbol.IF, 2, 2, 'looks_like_a_mummy'),
                                [indent(3, 3)]),
                            new LexNode('else',
                                vscode.TreeItemCollapsibleState.None,
                                new LineToken(Symbol.ELSE, 4, 2))
                                new LineToken(Symbol.ELSE, 4, 2),
                                [indent(5, 3)])
                        ]
                    ),
                    new LexNode('elif in_my_mouth',
                        vscode.TreeItemCollapsibleState.None,
                        new LineToken(Symbol.ELIF, 6, 1, 'in_my_mouth'))
                        new LineToken(Symbol.ELIF, 6, 1, 'in_my_mouth'),
                        [indent(7, 2)]
                    )
                ]
            ),
            new LexNode('else',
                vscode.TreeItemCollapsibleState.None,
                new LineToken(Symbol.ELSE, 8, 0)
                new LineToken(Symbol.ELSE, 8, 0),
                [indent(9, 1)]
            )
        ])
    },
    {
        name: 'Multiline Block',
        input: [
            'if yummy:',
            ' print("you have a spot on your tummy"',
            ' print("eek! a zombie!)',
            ' print("itll be in my tummy soon!"',
            'else:',
            ' print("Food is food...")'
        ],
        output: root([
            new LexNode('if yummy', 0, new LineToken(Symbol.IF, 0, 0, 'yummy'),
                [
                    indent(1, 1),
                    indent(2, 1),
                    indent(3, 1),
                ]
            ),
            new LexNode('else', 0, new LineToken(Symbol.ELSE, 4, 0), [indent(5, 1)])
        ])
    }
];

/* Checks for strict equality between the tokens of a lex node tree */
const checkEq = (reference: LexNode, subject: LexNode) => {
    if (!reference.children()) {
        // subject should also have no children
        assert.deepStrictEqual(subject.children(), null);
        return;
    }

    assert.notStrictEqual(subject.children(), null);
    assert.deepStrictEqual(reference.children()!.length, subject.children()!.length);
    for (let i = 0; i < subject.children()!.length; i++) {
        // compare top level nodes
        assert.deepStrictEqual(reference.children()![i].token, subject.children()![i].token);

        // compare all children
        checkEq(reference.children()![i], subject.children()![i]);
    }
};

suite('Parser Test Suite', () => {
    after(() => {
        vscode.window.showInformationMessage('All tests passed!');

@@ -230,10 +294,8 @@ suite('Parser Test Suite', () => {
    for (var t of tests) {
        let currTest = t; // without this, all test calls get the last test
        test(currTest.name, () => {
            let result: LexNode = deparent(new Parser(currTest.input.join('\n')).parse());
            process.stdout.write(Object.entries(result).toString());

            assert.deepStrictEqual(result, currTest.output);
            let result = new Parser(currTest.input.join('\n')).parse();
            checkEq(currTest.output, result);
        });
    }
});
@@ -1,6 +1,5 @@
import * as vscode from 'vscode';

import LexNode from '../pylex/node';
import { LineToken, PylexSymbol, LexNode} from '../pylex';

/**
 * TODO: Eliminate need for me.

@@ -50,8 +49,20 @@ function root(nodes: LexNode[] | null): LexNode {
    );
}

/* shorthand for returning an indentation node for a certain line and indentation level */
function indent(linenr: number, indentLevel: number): LexNode {
    return new LexNode('INDENT', 0, new LineToken(PylexSymbol.INDENT, linenr, indentLevel));
}

/* shorthand for returning an empty-line node for a certain line */
function empty(linenr: number): LexNode {
    return new LexNode('EMPTY', 0, new LineToken(PylexSymbol.EMPTY, linenr, 999999));
}

export {
    deparent,
    root,
    indent,
    empty
};