
/irony-47261/Irony.Samples/SourceSamples/c#/Irony_GrammarDataBuilder.cs

https://bitbucket.org/earwicker/spec
#region License
/* **********************************************************************************
* Copyright (c) Roman Ivantsov
* This source code is subject to terms and conditions of the MIT License
* for Irony. A copy of the license can be found in the License.txt file
* at the root of this distribution.
* By using this source code in any fashion, you are agreeing to be bound by the terms of the
* MIT License.
* You must not remove this notice from this software.
* **********************************************************************************/
#endregion
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.Diagnostics;

namespace Irony.Compiler {
  // This class contains all the complex logic of extracting the Parser/Scanner's DFA tables and other control information
  // from the language grammar.
  // Warning: unlike other classes in this project, understanding what's going on here requires some knowledge of
  // LR/LALR parsing algorithms. For this I refer you to the Dragon book or any other book on compiler/parser construction.
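  // Typical usage (an illustrative sketch; "myGrammar" stands for any Grammar-derived instance you supply):
  //   GrammarDataBuilder builder = new GrammarDataBuilder(myGrammar);
  //   builder.Build();
  //   //builder.Data now holds the terminals, productions, parser states and lookup tables;
  //   //builder.Data.Errors and builder.Data.AnalysisCanceled report any problems found during analysis.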
  public class GrammarDataBuilder {
    class ShiftTable : Dictionary<string, LR0ItemList> { }
    private ParserStateTable _stateHash;
    public readonly GrammarData Data;
    Grammar _grammar;

    public GrammarDataBuilder(Grammar grammar) {
      _grammar = grammar;
      Data = new GrammarData();
      Data.Grammar = _grammar;
    }

    public void Build() {
      try {
        Data.ScannerRecoverySymbols = _grammar.WhitespaceChars + _grammar.Delimiters;
        if (_grammar.Root == null)
          Cancel("Root property of the grammar is not set.");
        //Create the augmented root for the grammar
        Data.AugmentedRoot = new NonTerminal(_grammar.Root.Name + "'", new BnfExpression(_grammar.Root));
        //Collect all terminals and non-terminals into the corresponding collections
        CollectAllElements();
        //Adjust case of symbol keys for a case-insensitive grammar (change keys to lowercase)
        if (!_grammar.CaseSensitive)
          AdjustCaseForSymbols();
        //Create productions and LR0Items
        CreateProductions();
        //Calculate the Nullability, Firsts and TailFirsts collections of all non-terminals
        CalculateNullability();
        CalculateFirsts();
        CalculateTailFirsts();
        //Create the parser states list, including the initial and final states
        CreateParserStates();
        //Propagate lookaheads
        PropagateLookaheads();
        //Debug.WriteLine("Time of PropagateLookaheads: " + time);
        //Now run through all states and create Reduce actions
        CreateReduceActions();
        //Finally, check for conflicts and detect operator-based actions
        CheckActionConflicts();
        //Call Init on all elements in the grammar
        InitAll();
        //Build a hash table of terminals for fast lookup by the current input char; note that this must run after Init
        BuildTerminalsLookupTable();
        //Validate
        ValidateAll();
      } catch (GrammarErrorException e) {
        Data.Errors.Add(e.Message);
        Data.AnalysisCanceled = true;
      }
    }//method
    private void Cancel(string msg) {
      if (msg == null) msg = "Grammar analysis canceled.";
      throw new GrammarErrorException(msg);
    }

    #region Collecting non-terminals
    int _unnamedCount; //internal counter for generating names for unnamed non-terminals

    private void CollectAllElements() {
      Data.NonTerminals.Clear();
      Data.Terminals.Clear();
      Data.Terminals.AddRange(_grammar.ExtraTerminals);
      _unnamedCount = 0;
      CollectAllElementsRecursive(Data.AugmentedRoot);
      Data.Terminals.Sort(Terminal.ByName);
      if (Data.AnalysisCanceled)
        Cancel(null);
    }

    private void CollectAllElementsRecursive(BnfTerm element) {
      //Terminal
      Terminal term = element as Terminal;
      // Do not add pseudo-terminals defined as static singletons in the Grammar class (Empty, Eof, etc.).
      // We will never see these terminals in the input stream.
      // Filter them by type - their type is exactly "Terminal", not a derived class.
      if (term != null && !Data.Terminals.Contains(term) && term.GetType() != typeof(Terminal)) {
        Data.Terminals.Add(term);
        return;
      }
      //NonTerminal
      NonTerminal nt = element as NonTerminal;
      if (nt == null || Data.NonTerminals.Contains(nt))
        return;
      if (nt.Name == null)
        nt.Name = "NT" + (_unnamedCount++);
      Data.NonTerminals.Add(nt);
      if (nt.Rule == null) {
        AddError("Non-terminal {0} has uninitialized Rule property.", nt.Name);
        Data.AnalysisCanceled = true;
        return;
      }
      //check all child elements
      foreach (BnfTermList elemList in nt.Rule.Data)
        for (int i = 0; i < elemList.Count; i++) {
          BnfTerm child = elemList[i];
          if (child == null) {
            AddError("Rule for NonTerminal {0} contains null as an operand in position {1} in one of its productions.", nt, i);
            continue; //for i loop
          }
          //Check for a nested expression - convert it to a non-terminal
          BnfExpression expr = child as BnfExpression;
          if (expr != null) {
            child = new NonTerminal(null, expr);
            elemList[i] = child;
          }
          CollectAllElementsRecursive(child);
        }
    }//method
    private void AdjustCaseForSymbols() {
      if (_grammar.CaseSensitive) return;
      foreach (Terminal term in Data.Terminals)
        if (term is SymbolTerminal)
          term.Key = term.Key.ToLower();
    }
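    // Illustrative example for BuildTerminalsLookupTable below (the terminals named here are hypothetical):
    // if a symbol terminal "if" reports the prefix "if" and a number terminal reports prefixes "0".."9",
    // then TerminalsLookup['i'] contains the "if" symbol and TerminalsLookup['0']..['9'] contain the number terminal;
    // a terminal that reports no prefixes at all (say, a generic identifier terminal) is appended to every list,
    // and each list is then sorted so that higher-priority terminals are tried first.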
    private void BuildTerminalsLookupTable() {
      Data.TerminalsLookup.Clear();
      Data.TerminalsWithoutPrefixes.Clear();
      foreach (Terminal term in Data.Terminals) {
        IList<string> prefixes = term.GetFirsts();
        if (prefixes == null || prefixes.Count == 0) {
          Data.TerminalsWithoutPrefixes.Add(term);
          continue;
        }
        //Go through the prefixes one by one
        foreach (string prefix in prefixes) {
          if (string.IsNullOrEmpty(prefix)) continue;
          //Calculate the hash key for the prefix
          char hashKey = prefix[0];
          if (!_grammar.CaseSensitive)
            hashKey = char.ToLower(hashKey);
          TerminalList currentList;
          if (!Data.TerminalsLookup.TryGetValue(hashKey, out currentList)) {
            //if the list does not exist yet, create it
            currentList = new TerminalList();
            Data.TerminalsLookup[hashKey] = currentList;
          }
          //add the terminal to the list
          currentList.Add(term);
        }
      }//foreach term
      //Now add the terminals without prefixes to every list in the table
      if (Data.TerminalsWithoutPrefixes.Count > 0)
        foreach (TerminalList list in Data.TerminalsLookup.Values)
          list.AddRange(Data.TerminalsWithoutPrefixes);
      //Sort all terminal lists by reverse priority, so that terminals with higher priority come first in the list
      foreach (TerminalList list in Data.TerminalsLookup.Values)
        if (list.Count > 1)
          list.Sort(Terminal.ByPriorityReverse);
    }//method
    #endregion

    #region Creating Productions
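    // Illustrative example (hypothetical non-terminal): a rule written as Stmt.Rule = AssignStmt | IfStmt
    // has two term sequences in its BnfExpression data, so two Production objects are created for Stmt;
    // sequences from ErrorRule, if present, become additional productions of the same non-terminal.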
    private void CreateProductions() {
      Data.Productions.Clear();
      //each LR0Item gets its unique ID; the last assigned (max) ID is kept in a static field
      LR0Item._maxID = 0;
      foreach (NonTerminal nt in Data.NonTerminals) {
        nt.Productions.Clear();
        //Get data (sequences) from both Rule and ErrorRule
        BnfExpressionData allData = new BnfExpressionData();
        allData.AddRange(nt.Rule.Data);
        if (nt.ErrorRule != null)
          allData.AddRange(nt.ErrorRule.Data);
        //actually create productions for each sequence
        foreach (BnfTermList prodOperands in allData) {
          bool isInitial = (nt == Data.AugmentedRoot);
          Production prod = new Production(isInitial, nt, prodOperands);
          nt.Productions.Add(prod);
          Data.Productions.Add(prod);
        }//foreach prodOperands
      }
    }
    #endregion

    #region Nullability calculation
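    // Illustrative walk-through (hypothetical grammar fragment): given A -> B C | "x", B -> "b" | <empty>, C -> B,
    // B is marked nullable on the first pass (it has an empty production); C is marked once B is known to be
    // nullable, and A is marked once both B and C are (via the production B C; the production "x" is skipped
    // because it contains a terminal). The loop stops as soon as a full pass decides nothing new.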
    private void CalculateNullability() {
      NonTerminalList undecided = Data.NonTerminals;
      while (undecided.Count > 0) {
        NonTerminalList newUndecided = new NonTerminalList();
        foreach (NonTerminal nt in undecided)
          if (!CalculateNullability(nt, undecided))
            newUndecided.Add(nt);
        if (undecided.Count == newUndecided.Count) return; //we didn't decide on any new ones, so we're done
        undecided = newUndecided;
      }//while
    }

    private bool CalculateNullability(NonTerminal nonTerminal, NonTerminalList undecided) {
      foreach (Production prod in nonTerminal.Productions) {
        //If the production has terminals, it is not nullable and cannot contribute to nullability
        if (prod.HasTerminals) continue;
        if (prod.IsEmpty()) {
          nonTerminal.Nullable = true;
          return true; //Nullable
        }//if
        //Go through all elements of the production and check nullability
        bool allNullable = true;
        foreach (BnfTerm term in prod.RValues) {
          NonTerminal nt = term as NonTerminal;
          if (nt != null)
            allNullable &= nt.Nullable;
        }//foreach term
        if (allNullable) {
          nonTerminal.Nullable = true;
          return true;
        }
      }//foreach prod
      return false; //cannot decide
    }
    #endregion

    #region Calculating Firsts
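    // Illustrative walk-through (hypothetical grammar fragment): for A -> B "x" and B -> "y" | <empty>,
    // the first step adds "y" to B.Firsts, records that B propagates its firsts to A, and - because B is
    // nullable - continues past B and adds "x" to A.Firsts as well. The second step then propagates "y"
    // from B into A.Firsts, and keeps iterating until no Firsts set grows any further.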
    private void CalculateFirsts() {
      //1. Calculate PropagateTo lists and put initial terminals into Firsts lists
      foreach (Production prod in Data.Productions) {
        foreach (BnfTerm term in prod.RValues) {
          if (term is Terminal) { //it is a terminal, so add it to Firsts and we're done with this production
            prod.LValue.Firsts.Add(term.Key); // Add the terminal to Firsts (note: Add ignores repetitions)
            break; //from foreach term
          }//if
          NonTerminal nt = term as NonTerminal;
          if (!nt.PropagateFirstsTo.Contains(prod.LValue))
            nt.PropagateFirstsTo.Add(prod.LValue); //ignores repetitions
          if (!nt.Nullable) break; //if it is not nullable, we're done
        }//foreach term
      }//foreach prod
      //2. Propagate all firsts through all dependencies
      NonTerminalList workList = Data.NonTerminals;
      while (workList.Count > 0) {
        NonTerminalList newList = new NonTerminalList();
        foreach (NonTerminal nt in workList) {
          foreach (NonTerminal toNt in nt.PropagateFirstsTo)
            foreach (string symbolKey in nt.Firsts) {
              if (!toNt.Firsts.Contains(symbolKey)) {
                toNt.Firsts.Add(symbolKey);
                if (!newList.Contains(toNt))
                  newList.Add(toNt);
              }//if
            }//foreach symbolKey
        }//foreach nt in workList
        workList = newList;
      }//while
    }//method
    #endregion

    #region Calculating Tail Firsts
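    // Illustrative walk-through (hypothetical production): for A -> B "x" C, the item A -> B . "x" C has the
    // tail (C); its TailFirsts are C's Firsts and TailIsNullable mirrors C's nullability. The item A -> . B "x" C
    // has the tail ("x" C); since "x" is a terminal, the accumulated firsts are reset to {"x"} and that tail is
    // not nullable. The last two items (dot before C and dot at the end) always get an empty, nullable tail.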
    private void CalculateTailFirsts() {
      foreach (Production prod in Data.Productions) {
        KeyList accumulatedFirsts = new KeyList();
        bool allNullable = true;
        //We are going backwards through the LR0Items list
        for (int i = prod.LR0Items.Count - 1; i >= 0; i--) {
          LR0Item item = prod.LR0Items[i];
          if (i >= prod.LR0Items.Count - 2) {
            //The last and next-to-last items have empty tails
            item.TailIsNullable = true;
            item.TailFirsts.Clear();
            continue;
          }
          BnfTerm term = prod.RValues[item.Position + 1]; //the element after the one following the dot
          NonTerminal ntElem = term as NonTerminal;
          if (ntElem == null || !ntElem.Nullable) { //term is a terminal or a non-nullable NonTerminal
            //term is not nullable, so we clear all old firsts and add this term
            accumulatedFirsts.Clear();
            allNullable = false;
            item.TailIsNullable = false;
            if (ntElem == null) {
              item.TailFirsts.Add(term.Key); //term is a terminal, so add its key
              accumulatedFirsts.Add(term.Key);
            } else {
              item.TailFirsts.AddRange(ntElem.Firsts); //non-terminal
              accumulatedFirsts.AddRange(ntElem.Firsts);
            }
            continue;
          }
          //if we are here, then ntElem is a nullable NonTerminal, so we add its Firsts to the accumulated set
          accumulatedFirsts.AddRange(ntElem.Firsts);
          item.TailFirsts.AddRange(accumulatedFirsts);
          item.TailIsNullable = allNullable;
        }//for i
      }//foreach prod
    }//method
    #endregion

    #region Creating parser states
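    // Illustrative note on the construction below: starting from the single-item initial state, a shift over an
    // input key gathers every item whose dot stands before that key, advances each dot by one position, and
    // either finds an existing state with the same kernel item set or creates a new one. The "for index" loop
    // keeps running over newly appended states and terminates because only finitely many LR0 item sets exist.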
    private void CreateParserStates() {
      Data.States.Clear();
      _stateHash = new ParserStateTable();
      //Create the initial state
      //there is always just one initial production, Root' -> Root, and we're interested in the LR item at index 0
      LR0ItemList itemList = new LR0ItemList();
      itemList.Add(Data.AugmentedRoot.Productions[0].LR0Items[0]);
      Data.InitialState = FindOrCreateState(itemList); //at this point it is actually created
      Data.InitialState.Items[0].NewLookaheads.Add(Grammar.Eof.Key);
      //create the final state - we need to create it explicitly to assign it to the Data.FinalState property;
      // the final state is based on the same initial production, but a different LR item - the one with the dot
      // AFTER the root non-terminal, i.e. the item at index 1.
      itemList = new LR0ItemList();
      itemList.Add(Data.AugmentedRoot.Productions[0].LR0Items[1]);
      Data.FinalState = FindOrCreateState(itemList);
      // Iterate through the states (while new ones are being created) and create shift transitions and new states
      for (int index = 0; index < Data.States.Count; index++) {
        ParserState state = Data.States[index];
        AddClosureItems(state);
        //Get the keys of all possible shifts
        ShiftTable shiftTable = GetStateShifts(state);
        //Each key in the shifts dictionary is an input element;
        // the value is the LR0ItemList of shifted LR0Items for this input element.
        foreach (string input in shiftTable.Keys) {
          LR0ItemList shiftedCoreItems = shiftTable[input];
          ParserState newState = FindOrCreateState(shiftedCoreItems);
          state.Actions[input] = new ActionRecord(input, ParserActionType.Shift, newState, null);
          //link the original LR items in the original state to the derived LR items in newState
          foreach (LR0Item coreItem in shiftedCoreItems) {
            LRItem fromItem = FindItem(state, coreItem.Production, coreItem.Position - 1);
            LRItem toItem = FindItem(newState, coreItem.Production, coreItem.Position);
            if (!fromItem.PropagateTargets.Contains(toItem))
              fromItem.PropagateTargets.Add(toItem);
          }//foreach coreItem
        }//foreach input
      } //for index
    }//method

    private string AdjustCase(string key) {
      return _grammar.CaseSensitive ? key : key.ToLower();
    }
    private LRItem TryFindItem(ParserState state, LR0Item core) {
      foreach (LRItem item in state.Items)
        if (item.Core == core)
          return item;
      return null;
    }//method

    private LRItem FindItem(ParserState state, Production production, int position) {
      foreach (LRItem item in state.Items)
        if (item.Core.Production == production && item.Core.Position == position)
          return item;
      string msg = string.Format("Failed to find an LRItem in state {0} by production [{1}] and position {2}. ",
        state, production.ToString(), position.ToString());
      throw new IronyException(msg);
    }//method

    private ShiftTable GetStateShifts(ParserState state) {
      ShiftTable shifts = new ShiftTable();
      LR0ItemList list;
      foreach (LRItem item in state.Items) {
        BnfTerm term = item.Core.NextElement;
        if (term == null) continue;
        LR0Item shiftedItem = item.Core.Production.LR0Items[item.Core.Position + 1];
        if (!shifts.TryGetValue(term.Key, out list))
          shifts[term.Key] = list = new LR0ItemList();
        list.Add(shiftedItem);
      }//foreach
      return shifts;
    }//method

    private ParserState FindOrCreateState(LR0ItemList lr0Items) {
      string key = CalcItemListKey(lr0Items);
      ParserState result;
      if (_stateHash.TryGetValue(key, out result))
        return result;
      result = new ParserState("S" + Data.States.Count, lr0Items);
      Data.States.Add(result);
      _stateHash[key] = result;
      return result;
    }
    //Creates closure items with "spontaneously generated" lookaheads
    private bool AddClosureItems(ParserState state) {
      bool result = false;
      //note that we change the collection while we iterate through it, so we have to use a "for i" loop
      for (int i = 0; i < state.Items.Count; i++) {
        LRItem item = state.Items[i];
        NonTerminal nextNT = item.Core.NextElement as NonTerminal;
        if (nextNT == null) continue;
        //1. Add normal closure items
        foreach (Production prod in nextNT.Productions) {
          LR0Item core = prod.LR0Items[0]; //the item at index zero is the one with the dot at the start
          LRItem newItem = TryFindItem(state, core);
          if (newItem == null) {
            newItem = new LRItem(core);
            state.Items.Add(newItem);
            result = true;
          }
          #region Comments on lookaheads processing
          // The general idea of generating ("spontaneously") the lookaheads is the following.
          // Let the original item be of the form
          //    [A -> alpha . B beta, lset]
          // where <B> is a non-terminal, <lset> is a set of lookaheads,
          // and <beta> is some string (B's "tail" in our terminology).
          // Then the closure item on non-terminal B is an item
          //    [B -> x, firsts(beta + lset)]
          // (the lookahead set is the expression after the comma).
          // To generate lookaheads for a closure item, we simply take the "firsts"
          // of the tail <beta> that follows the non-terminal <B>.
          // Normally, if the tail <beta> is nullable, we would also add ("propagate")
          // the <lset> lookaheads from <A> to <B>.
          // We don't do it right here - we simply add a propagation link.
          // We propagate all lookaheads later, in a separate process.
          #endregion
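          // Illustrative example (hypothetical items): for a state item [A -> "a" . B "b", {EOF}], the closure adds
          // [B -> . gamma] for every production of B; each closure item receives "b" (the firsts of B's tail) as a
          // spontaneous lookahead, and since that tail is not nullable no propagation link is added. If instead the
          // item were [A -> "a" . B, {EOF}], the tail is empty and nullable, so a propagation link is recorded and
          // EOF reaches the closure items later, during PropagateLookaheads.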
          newItem.NewLookaheads.AddRange(item.Core.TailFirsts);
          if (item.Core.TailIsNullable && !item.PropagateTargets.Contains(newItem))
            item.PropagateTargets.Add(newItem);
        }//foreach prod
      }//for i (LRItem)
      return result;
    }

    #region comments
    //Parser states are distinguished by the subset of kernel LR0 items.
    // So when we derive a new LR0-item list by a shift operation,
    // we need to find out whether we already have a state with the same LR0Item list.
    // We do it by looking it up in a state hash by a key - the [LR0 item list key].
    // Each list's key is a concatenation of the items' IDs separated by ','.
    // Before producing the key for a list, the list must be sorted;
    // thus we guarantee a one-to-one correspondence between LR0Item sets and keys.
    // And of course, we count only kernel items (with the dot NOT in the first position).
    #endregion
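    // Illustrative example: if a state's kernel items have IDs 12, 3 and 7, sorting gives 3, 7, 12 and the
    // resulting key is "3,7,12," (each kernel item's ID followed by a comma); any non-kernel items in the list
    // contribute nothing to the key.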
    private string CalcItemListKey(LR0ItemList items) {
      items.Sort(ById); //Sort by ID
      if (items.Count == 0) return "";
      //quick shortcut
      if (items.Count == 1 && items[0].IsKernel)
        return items[0].ID.ToString();
      StringBuilder sb = new StringBuilder(1024);
      foreach (LR0Item item in items) {
        if (item.IsKernel) {
          sb.Append(item.ID);
          sb.Append(",");
        }
      }//foreach
      return sb.ToString();
    }

    private static int ById(LR0Item x, LR0Item y) {
      if (x.ID < y.ID) return -1;
      if (x.ID == y.ID) return 0;
      return 1;
    }
    #endregion

    #region Lookaheads propagation
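    // Illustrative note on the worklist below: every LR item is queued initially; whenever an item's NewLookaheads
    // actually enlarge its Lookaheads_ set, those pending lookaheads are forwarded to each item in PropagateTargets
    // and the targets are re-queued. The loop terminates once a pass enlarges no lookahead set.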
    private void PropagateLookaheads() {
      LRItemList currentList = new LRItemList();
      //first collect all items
      foreach (ParserState state in Data.States)
        currentList.AddRange(state.Items);
      //Main loop - propagate until done
      while (currentList.Count > 0) {
        LRItemList newList = new LRItemList();
        foreach (LRItem item in currentList) {
          if (item.NewLookaheads.Count == 0) continue;
          int oldCount = item.Lookaheads_.Count;
          item.Lookaheads_.AddRange(item.NewLookaheads);
          if (item.Lookaheads_.Count != oldCount) {
            foreach (LRItem targetItem in item.PropagateTargets) {
              targetItem.NewLookaheads.AddRange(item.NewLookaheads);
              newList.Add(targetItem);
            }//foreach targetItem
          }//if
          item.NewLookaheads.Clear();
        }//foreach item
        currentList = newList;
      }//while
    }//method
    #endregion
    #region Final actions: CreateReduceActions
    private void CreateReduceActions() {
      foreach (ParserState state in Data.States) {
        foreach (LRItem item in state.Items) {
          //we are interested only in "dot at the end" items
          if (item.Core.NextElement != null) continue;
          foreach (string lookahead in item.Lookaheads_) {
            ActionRecord action;
            if (state.Actions.TryGetValue(lookahead, out action))
              action.ReduceProductions.Add(item.Core.Production);
            else
              state.Actions[lookahead] = new ActionRecord(lookahead, ParserActionType.Reduce, null, item.Core.Production);
          }//foreach lookahead
        }//foreach item
      }//foreach state
    }//method
    #endregion

    #region Check for shift-reduce conflicts
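    // Illustrative example (hypothetical grammar): for an expression rule like Expr -> Expr "+" Expr, the state that
    // has just recognized Expr "+" Expr sees "+" both as a shift and as a reduce - a shift-reduce conflict. If "+"
    // is a SymbolTerminal marked with TermOptions.IsOperator, the action becomes ParserActionType.Operator and is
    // resolved by precedence/associativity; otherwise an error is reported for that input.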
    private void CheckActionConflicts() {
      StringDictionary errorTable = new StringDictionary();
      foreach (ParserState state in Data.States) {
        foreach (ActionRecord action in state.Actions.Values) {
          //1. Pure shift
          if (action.NewState != null && action.ReduceProductions.Count == 0)
            continue; //ActionType is Shift by default
          //2. Pure reduce
          if (action.NewState == null && action.ReduceProductions.Count == 1) {
            action.ActionType = ParserActionType.Reduce;
            continue;
          }
          //3. Shift-reduce conflict
          if (action.NewState != null && action.ReduceProductions.Count > 0) {
            //it might be an operator, with resolution by precedence/associativity
            SymbolTerminal opTerm = SymbolTerminal.GetSymbol(action.Key);
            if (opTerm != null && opTerm.IsSet(TermOptions.IsOperator)) {
              action.ActionType = ParserActionType.Operator;
            } else {
              AddErrorForInput(errorTable, action.Key, "Shift-reduce conflict in state {0}, reduce production: {1}",
                state, action.ReduceProductions[0]);
              //NOTE: don't do "continue" here, we need to proceed to the reduce-reduce conflict check
            }//if...else
          }//if action...
          //4. Reduce-reduce conflicts
          if (action.ReduceProductions.Count > 1) {
            AddErrorForInput(errorTable, action.Key, "Reduce-reduce conflict in state {0} in productions: {1} ; {2}",
              state, action.ReduceProductions[0], action.ReduceProductions[1]);
          }
        }//foreach action
      }//foreach state
      //copy errors to the Errors collection; in errorTable, keys are error messages and values are the inputs for each message
      foreach (string msg in errorTable.Keys) {
        Data.Errors.Add(msg + " on inputs: " + errorTable[msg]);
      }
    }//method

    //Aggregate error messages for different inputs (lookaheads) in the errors dictionary
    private void AddErrorForInput(StringDictionary errors, string input, string template, params object[] args) {
      string msg = string.Format(template, args);
      string tmpInputs;
      errors.TryGetValue(msg, out tmpInputs);
      errors[msg] = tmpInputs + input + " ";
    }

    private bool ContainsProduction(ProductionList productions, NonTerminal nonTerminal) {
      foreach (Production prod in productions)
        if (prod.LValue == nonTerminal) return true;
      return false;
    }
    #endregion
    #region Initialize elements
    private void InitAll() {
      foreach (Terminal term in Data.Terminals)
        term.Init(_grammar);
      foreach (NonTerminal nt in Data.NonTerminals)
        nt.Init(_grammar);
      foreach (TokenFilter filter in _grammar.TokenFilters)
        filter.Init(_grammar);
    }
    #endregion

    private void ValidateAll() {
      //Check the rules of all non-terminals
      KeyList ntList = new KeyList();
      foreach (NonTerminal nt in Data.NonTerminals) {
        if (nt == Data.AugmentedRoot) continue; //the augmented root does not count
        BnfExpressionData data = nt.Rule.Data;
        if (data.Count == 1 && data[0].Count == 1 && data[0][0] is NonTerminal)
          ntList.Add(nt.Name);
      }//foreach
      if (ntList.Count > 0)
        AddError("Warning: Possible non-terminal duplication. The following non-terminals have rules containing a single non-terminal: \r\n {0}. \r\n" +
          "Consider merging two non-terminals; you may need to use 'nt1 = nt2;' instead of 'nt1.Rule=nt2'.", ntList.ToString(", "));
    }

    #region Error handling: AddError
    private void AddError(string message, params object[] args) {
      if (args != null && args.Length > 0)
        message = string.Format(message, args);
      Data.Errors.Add(message);
    }
    #endregion

  }//class
}//namespace