
/Raven.Database/Indexing/MapReduceIndex.cs

https://github.com/nwendel/ravendb
C# | 663 lines | 590 code | 60 blank | 13 comment | 60 complexity | 0bd1ec4164fd1dcc8c95459b68c13691 MD5
  1//-----------------------------------------------------------------------
  2// <copyright file="MapReduceIndex.cs" company="Hibernating Rhinos LTD">
  3//     Copyright (c) Hibernating Rhinos LTD. All rights reserved.
  4// </copyright>
  5//-----------------------------------------------------------------------
  6using System;
  7using System.Collections;
  8using System.Collections.Concurrent;
  9using System.Collections.Generic;
 10using System.ComponentModel;
 11using System.Diagnostics;
 12using System.Globalization;
 13using System.Linq;
 14using System.Text;
 15using System.Threading;
 16using Lucene.Net.Analysis;
 17using Lucene.Net.Documents;
 18using Lucene.Net.Index;
 19using Lucene.Net.Search;
 20using Lucene.Net.Store;
 21using Raven.Abstractions.Logging;
 22using Raven.Database.Extensions;
 23using Raven.Database.Plugins;
 24using Raven.Imports.Newtonsoft.Json;
 25using Raven.Abstractions;
 26using Raven.Abstractions.Extensions;
 27using Raven.Abstractions.Data;
 28using Raven.Abstractions.Indexing;
 29using Raven.Abstractions.Linq;
 30using Raven.Database.Data;
 31using Raven.Database.Linq;
 32using Raven.Database.Storage;
 33using Raven.Imports.Newtonsoft.Json.Linq;
 34using Raven.Json.Linq;
 35using Spatial4n.Core.Exceptions;
 36
 37namespace Raven.Database.Indexing
 38{
 39	public class MapReduceIndex : Index
 40	{
 41		readonly JsonSerializer jsonSerializer;
 42
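		// JSON converter that keeps Lucene field objects out of the serialized map results by replacing them with a placeholder value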
 43		private class IgnoreFieldable : JsonConverter
 44		{
 45			public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
 46			{
 47				writer.WriteValue("IgnoredLuceneField");
 48			}
 49
 50			public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
 51			{
 52				return null;
 53			}
 54
 55			public override bool CanConvert(Type objectType)
 56			{
 57				return typeof(IFieldable).IsAssignableFrom(objectType) ||
 58					   typeof(IEnumerable<AbstractField>).IsAssignableFrom(objectType);
 59			}
 60		}
 61
 62        public MapReduceIndex(Directory directory, int id, IndexDefinition indexDefinition,
 63							  AbstractViewGenerator viewGenerator, WorkContext context)
 64            : base(directory, id, indexDefinition, viewGenerator, context)
 65		{
 66			jsonSerializer = new JsonSerializer();
 67			foreach (var jsonConverter in Default.Converters)
 68			{
 69				jsonSerializer.Converters.Add(jsonConverter);
 70			}
 71			jsonSerializer.Converters.Add(new IgnoreFieldable());
 72		}
 73
 74		public override bool IsMapReduce
 75		{
 76			get { return true; }
 77		}
 78
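		// Map phase: deletes stale mapped results for the incoming documents, runs the map definitions in parallel,
		// stores the new mapped results and schedules reductions for every reduce key that changed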
 79		public override void IndexDocuments(
 80			AbstractViewGenerator viewGenerator,
 81			IndexingBatch batch,
 82			IStorageActionsAccessor actions,
 83			DateTime minimumTimestamp)
 84		{
 85			var count = 0;
 86			var sourceCount = 0;
 87			var sw = Stopwatch.StartNew();
 88			var start = SystemTime.UtcNow;
 89			var deleted = new Dictionary<ReduceKeyAndBucket, int>();
 90			RecordCurrentBatch("Current Map", batch.Docs.Count);
 91			var documentsWrapped = batch.Docs.Select(doc =>
 92			{
 93				sourceCount++;
 94				var documentId = doc.__document_id;
 95				actions.MapReduce.DeleteMappedResultsForDocumentId((string)documentId, indexId, deleted);
 96				return doc;
 97			})
 98				.Where(x => x is FilteredDocument == false)
 99				.ToList();
100			var allReferencedDocs = new ConcurrentQueue<IDictionary<string, HashSet<string>>>();
101			var allReferenceEtags = new ConcurrentQueue<IDictionary<string, Etag>>();
102			var allState = new ConcurrentQueue<Tuple<HashSet<ReduceKeyAndBucket>, IndexingWorkStats, Dictionary<string, int>>>();
103			BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, documentsWrapped, partition =>
104			{
105				var localStats = new IndexingWorkStats();
106				var localChanges = new HashSet<ReduceKeyAndBucket>();
107				var statsPerKey = new Dictionary<string, int>();
108				allState.Enqueue(Tuple.Create(localChanges, localStats, statsPerKey));
109
 110                using (CurrentIndexingScope.Current = new CurrentIndexingScope(context.Database, PublicName))
111				{
 112					// we are writing to the transactional store from multiple threads here, and in a streaming fashion,
 113					// which should result in lower memory usage and better performance
114					context.TransactionalStorage.Batch(accessor =>
115					{
116						var mapResults = RobustEnumerationIndex(partition, viewGenerator.MapDefinitions, localStats);
117						var currentDocumentResults = new List<object>();
118						string currentKey = null;
119						foreach (var currentDoc in mapResults)
120						{
121							var documentId = GetDocumentId(currentDoc);
122							if (documentId != currentKey)
123							{
124								count += ProcessBatch(viewGenerator, currentDocumentResults, currentKey, localChanges, accessor, statsPerKey);
125								currentDocumentResults.Clear();
126								currentKey = documentId;
127							}
128							currentDocumentResults.Add(new DynamicJsonObject(RavenJObject.FromObject(currentDoc, jsonSerializer)));
129
130							EnsureValidNumberOfOutputsForDocument(documentId, currentDocumentResults.Count);
131
132							Interlocked.Increment(ref localStats.IndexingSuccesses);
133						}
134						count += ProcessBatch(viewGenerator, currentDocumentResults, currentKey, localChanges, accessor, statsPerKey);
135					});
136					allReferenceEtags.Enqueue(CurrentIndexingScope.Current.ReferencesEtags);
137					allReferencedDocs.Enqueue(CurrentIndexingScope.Current.ReferencedDocuments);
138				}
139			});
140
141
142
143			UpdateDocumentReferences(actions, allReferencedDocs, allReferenceEtags);
144
145		    var changed = allState.SelectMany(x => x.Item1).Concat(deleted.Keys)
146					.Distinct()
147					.ToList();
148
149			var stats = new IndexingWorkStats(allState.Select(x => x.Item2));
150			var reduceKeyStats = allState.SelectMany(x => x.Item3)
151										 .GroupBy(x => x.Key)
152										 .Select(g => new { g.Key, Count = g.Sum(x => x.Value) })
153										 .ToList();
154
155			BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, reduceKeyStats, enumerator => context.TransactionalStorage.Batch(accessor =>
156			{
157				while (enumerator.MoveNext())
158				{
159					var reduceKeyStat = enumerator.Current;
160                    accessor.MapReduce.IncrementReduceKeyCounter(indexId, reduceKeyStat.Key, reduceKeyStat.Count);
161				}
162			}));
163
164			BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, changed, enumerator => context.TransactionalStorage.Batch(accessor =>
165			{
166				while (enumerator.MoveNext())
167				{
168                    accessor.MapReduce.ScheduleReductions(indexId, 0, enumerator.Current);
169				}
170			}));
171
172
173			UpdateIndexingStats(context, stats);
174			AddindexingPerformanceStat(new IndexingPerformanceStats
175			{
176				OutputCount = count,
177				ItemsCount = sourceCount,
178				InputCount = documentsWrapped.Count,
179				Operation = "Map",
180				Duration = sw.Elapsed,
181				Started = start
182			});
183			BatchCompleted("Current Map");
184            logIndexing.Debug("Mapped {0} documents for {1}", count, indexId);
185		}
186
187	    private int ProcessBatch(AbstractViewGenerator viewGenerator, List<object> currentDocumentResults, string currentKey, HashSet<ReduceKeyAndBucket> changes,
188			IStorageActionsAccessor actions,
189			IDictionary<string, int> statsPerKey)
190		{
191			if (currentKey == null || currentDocumentResults.Count == 0)
192				return 0;
193	        var old = CurrentIndexingScope.Current;
194	        try
195	        {
196                CurrentIndexingScope.Current = null;
197
198	            if (logIndexing.IsDebugEnabled)
199	            {
200	                var sb = new StringBuilder()
201	                    .AppendFormat("Index {0} for document {1} resulted in:", PublicName, currentKey)
202	                    .AppendLine();
203	                foreach (var currentDocumentResult in currentDocumentResults)
204	                {
205	                    sb.AppendLine(JsonConvert.SerializeObject(currentDocumentResult));
206	                }
207	                logIndexing.Debug(sb.ToString());
208	            }
209
210			int count = 0;
211			var results = RobustEnumerationReduceDuringMapPhase(currentDocumentResults.GetEnumerator(), viewGenerator.ReduceDefinition);
212			foreach (var doc in results)
213			{
214				count++;
215
216				var reduceValue = viewGenerator.GroupByExtraction(doc);
217				if (reduceValue == null)
218				{
219					logIndexing.Debug("Field {0} is used as the reduce key and cannot be null, skipping document {1}",
220									  viewGenerator.GroupByExtraction, currentKey);
221					continue;
222				}
223				string reduceKey = ReduceKeyToString(reduceValue);
224
225				var data = GetMappedData(doc);
226
227	                logIndexing.Debug("Index {0} for document {1} resulted in ({2}): {3}", PublicName, currentKey, reduceKey, data);
228				actions.MapReduce.PutMappedResult(indexId, currentKey, reduceKey, data);
229				statsPerKey[reduceKey] = statsPerKey.GetOrDefault(reduceKey) + 1;
230				actions.General.MaybePulseTransaction();
231				changes.Add(new ReduceKeyAndBucket(IndexingUtil.MapBucket(currentKey), reduceKey));
232			}
233			return count;
234		}
235	        finally 
236	        {
237	            CurrentIndexingScope.Current = old;
238	        }
239		}
240
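		// Converts a single map output into the RavenJObject that is persisted as its mapped result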
241		private RavenJObject GetMappedData(object doc)
242		{
243			if (doc is IDynamicJsonObject)
244				return ((IDynamicJsonObject)doc).Inner;
245
246			var ravenJTokenWriter = new RavenJTokenWriter();
247			jsonSerializer.Serialize(ravenJTokenWriter, doc);
248			return (RavenJObject)ravenJTokenWriter.Token;
249		}
250
251		private static readonly ConcurrentDictionary<Type, Func<object, object>> documentIdFetcherCache =
252			new ConcurrentDictionary<Type, Func<object, object>>();
253
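		// Extracts the source document id from a map output, caching a per-type property accessor for non-dynamic results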
254		private static string GetDocumentId(object doc)
255		{
256			var docIdFetcher = documentIdFetcherCache.GetOrAdd(doc.GetType(), type =>
257			{
258				// document may be DynamicJsonObject if we are using compiled views
259				if (typeof(DynamicJsonObject) == type)
260				{
261					return i => ((dynamic)i).__document_id;
262				}
263				var docIdProp = TypeDescriptor.GetProperties(doc).Find(Constants.DocumentIdFieldName, false);
264				return docIdProp.GetValue;
265			});
266			if (docIdFetcher == null)
267				throw new InvalidOperationException("Could not create document id fetcher for this document");
268			var documentId = docIdFetcher(doc);
269			if (documentId == null || documentId is DynamicNullObject)
 270				throw new InvalidOperationException("Could not get document id for this document");
271
272			return (string)documentId;
273		}
274
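		// Normalizes a reduce key value (string, date, value type or JSON object) to its canonical string representation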
275		internal static string ReduceKeyToString(object reduceValue)
276		{
277			if (reduceValue is string)
278			{
279				return reduceValue.ToString();
280			}
281			if (reduceValue is DateTime)
282                return ((DateTime)reduceValue).ToString(Default.DateTimeFormatsToWrite);
283			if (reduceValue is DateTimeOffset)
284				return ((DateTimeOffset)reduceValue).ToString(Default.DateTimeFormatsToWrite, CultureInfo.InvariantCulture);
285			if (reduceValue is ValueType)
286				return reduceValue.ToString();
287
288			var dynamicJsonObject = reduceValue as IDynamicJsonObject;
289			if (dynamicJsonObject != null)
290				return dynamicJsonObject.Inner.ToString(Formatting.None);
291			return RavenJToken.FromObject(reduceValue).ToString(Formatting.None);
292		}
293
294		protected override IndexQueryResult RetrieveDocument(Document document, FieldsToFetch fieldsToFetch, ScoreDoc score)
295		{
296			fieldsToFetch.EnsureHasField(Constants.ReduceKeyFieldName);
297			if (fieldsToFetch.IsProjection)
298			{
299				return base.RetrieveDocument(document, fieldsToFetch, score);
300			}
301			var field = document.GetField(Constants.ReduceValueFieldName);
302			if (field == null)
303			{
304				fieldsToFetch = fieldsToFetch.CloneWith(document.GetFields().Select(x => x.Name).ToArray());
305				return base.RetrieveDocument(document, fieldsToFetch, score);
306			}
307			return new IndexQueryResult
308			{
309				Projection = RavenJObject.Parse(field.StringValue),
310				Score = score.Score
311			};
312		}
313
314		protected override void HandleCommitPoints(IndexedItemsInfo itemsInfo, IndexSegmentsInfo segmentsInfo)
315		{
 316			// MapReduce index does not store or use any commit points
317		}
318
319		protected override bool IsUpToDateEnoughToWriteToDisk(Etag highestETag)
320		{
 321			// for map/reduce indexes we always write to disk; the in-memory optimization
 322			// doesn't buy us much here, since we already write the intermediate results
 323			// to disk anyway
324			return true;
325		}
326
327		public override void Remove(string[] keys, WorkContext context)
328		{
329			context.TransactionalStorage.Batch(actions =>
330			{
331				var reduceKeyAndBuckets = new Dictionary<ReduceKeyAndBucket, int>();
332				foreach (var key in keys)
333				{
334                    actions.MapReduce.DeleteMappedResultsForDocumentId(key, indexId, reduceKeyAndBuckets);
335				}
336
337                actions.MapReduce.UpdateRemovedMapReduceStats(indexId, reduceKeyAndBuckets);
338				foreach (var reduceKeyAndBucket in reduceKeyAndBuckets)
339				{
340                    actions.MapReduce.ScheduleReductions(indexId, 0, reduceKeyAndBucket.Key);
341				}
342			});
343			Write((writer, analyzer, stats) =>
344			{
345				stats.Operation = IndexingWorkStats.Status.Ignore;
346				logIndexing.Debug(() => string.Format("Deleting ({0}) from {1}", string.Join(", ", keys), PublicName));
347				writer.DeleteDocuments(keys.Select(k => new Term(Constants.ReduceKeyFieldName, k.ToLowerInvariant())).ToArray());
348				return new IndexedItemsInfo(null)
349				{
350					ChangedDocs = keys.Length
351				};
352			});
353		}
354
355		public class ReduceDocuments
356		{
357			private readonly MapReduceIndex parent;
358			private readonly int inputCount;
359            private readonly int indexId;
360			readonly AnonymousObjectToLuceneDocumentConverter anonymousObjectToLuceneDocumentConverter;
361			private readonly Document luceneDoc = new Document();
362			private readonly Field reduceValueField = new Field(Constants.ReduceValueFieldName, "dummy",
363													 Field.Store.YES, Field.Index.NO);
364
365			private readonly Field reduceKeyField = new Field(Constants.ReduceKeyFieldName, "dummy",
366													 Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
367			private PropertyDescriptorCollection properties = null;
368			private readonly List<AbstractIndexUpdateTriggerBatcher> batchers;
369
370			public ReduceDocuments(MapReduceIndex parent, AbstractViewGenerator viewGenerator, IEnumerable<IGrouping<int, object>> mappedResultsByBucket, int level, WorkContext context, IStorageActionsAccessor actions, HashSet<string> reduceKeys, int inputCount)
371			{
372				this.parent = parent;
373				this.inputCount = inputCount;
374                indexId = this.parent.indexId;
375				ViewGenerator = viewGenerator;
376				MappedResultsByBucket = mappedResultsByBucket;
377				Level = level;
378				Context = context;
379				Actions = actions;
380				ReduceKeys = reduceKeys;
381
382                anonymousObjectToLuceneDocumentConverter = new AnonymousObjectToLuceneDocumentConverter(this.parent.context.Database, this.parent.indexDefinition, ViewGenerator, logIndexing);
383
384				if (Level == 2)
385				{
386                    batchers = Context.IndexUpdateTriggers.Select(x => x.CreateBatcher(indexId))
387								.Where(x => x != null)
388								.ToList();
389				}
390			}
391
392			public AbstractViewGenerator ViewGenerator { get; private set; }
393			public IEnumerable<IGrouping<int, object>> MappedResultsByBucket { get; private set; }
394			public int Level { get; private set; }
395			public WorkContext Context { get; private set; }
396			public IStorageActionsAccessor Actions { get; private set; }
397			public HashSet<string> ReduceKeys { get; private set; }
398
399			private string ExtractReduceKey(AbstractViewGenerator viewGenerator, object doc)
400			{
401				try
402				{
403					object reduceKey = viewGenerator.GroupByExtraction(doc);
404					if (reduceKey == null)
405					{
406                        throw new InvalidOperationException("Could not find reduce key for " + indexId + " in the result: " + doc);
407					}
408					return ReduceKeyToString(reduceKey);
409				}
410				catch (Exception e)
411				{
412					throw new InvalidOperationException("Could not extract reduce key from reduce result!", e);
413				}
414			}
415
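			// Converts a reduce output into Lucene fields; boosted values are unwrapped and their boost is returned to the caller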
416			private IEnumerable<AbstractField> GetFields(object doc, out float boost)
417			{
418				boost = 1;
419				var boostedValue = doc as BoostedValue;
420				if (boostedValue != null)
421				{
422					doc = boostedValue.Value;
423					boost = boostedValue.Boost;
424				}
425				IEnumerable<AbstractField> fields = null;
426
427				try
428				{
429					if (doc is IDynamicJsonObject)
430					{
431						fields = anonymousObjectToLuceneDocumentConverter.Index(((IDynamicJsonObject)doc).Inner, Field.Store.NO);
432					}
433					else
434					{
435						properties = properties ?? TypeDescriptor.GetProperties(doc);
436						fields = anonymousObjectToLuceneDocumentConverter.Index(doc, properties, Field.Store.NO);
437					}
438				}
439				catch (InvalidShapeException)
440				{
441				}
442
443				if (Math.Abs(boost - 1) > float.Epsilon)
444				{
445					var abstractFields = fields.ToList();
446					foreach (var abstractField in abstractFields)
447					{
448						abstractField.OmitNorms = false;
449					}
450					return abstractFields;
451				}
452				return fields;
453			}
454
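			// Converts a reduce output back to JSON for storage, flattening nested arrays and stripping helper properties whose names are only underscores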
455			private static RavenJObject ToJsonDocument(object doc)
456			{
457				var boostedValue = doc as BoostedValue;
458				if (boostedValue != null)
459				{
460					doc = boostedValue.Value;
461				}
462				var dynamicJsonObject = doc as IDynamicJsonObject;
463				if (dynamicJsonObject != null)
464				{
465					return dynamicJsonObject.Inner;
466				}
467				var ravenJObject = doc as RavenJObject;
468				if (ravenJObject != null)
469					return ravenJObject;
470				var jsonDocument = RavenJObject.FromObject(doc);
471				MergeArrays(jsonDocument);
472
473				// remove _, __, etc fields
474				foreach (var prop in jsonDocument.Where(x => x.Key.All(ch => ch == '_')).ToArray())
475				{
476					jsonDocument.Remove(prop.Key);
477				}
478				return jsonDocument;
479			}
480
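			// Recursively flattens nested arrays in place so that a collection of collections is stored as a single flat array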
481			private static void MergeArrays(RavenJToken token)
482			{
483				if (token == null)
484					return;
485				switch (token.Type)
486				{
487					case JTokenType.Array:
488						var arr = (RavenJArray)token;
489						for (int i = 0; i < arr.Length; i++)
490						{
491							var current = arr[i];
492							if (current == null || current.Type != JTokenType.Array)
493								continue;
494							arr.RemoveAt(i);
495							i--;
496							var j = Math.Max(0, i);
497							foreach (var item in (RavenJArray)current)
498							{
499								arr.Insert(j++, item);
500							}
501						}
502						break;
503					case JTokenType.Object:
504						foreach (var kvp in ((RavenJObject)token))
505						{
506							MergeArrays(kvp.Value);
507						}
508						break;
509				}
510			}
511
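			// Runs the reduce definition over the mapped results: levels 0 and 1 persist intermediate reduced results,
			// level 2 writes the final reduce outputs to the Lucene index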
512			public void ExecuteReduction()
513			{
514				var count = 0;
515				var sourceCount = 0;
516				var sw = Stopwatch.StartNew();
517				var start = SystemTime.UtcNow;
518
519				parent.Write((indexWriter, analyzer, stats) =>
520				{
521					stats.Operation = IndexingWorkStats.Status.Reduce;
522					try
523					{
524						parent.RecordCurrentBatch("Current Reduce #" + Level, MappedResultsByBucket.Sum(x => x.Count()));
525						if (Level == 2)
526						{
527							RemoveExistingReduceKeysFromIndex(indexWriter);
528						}
529						foreach (var mappedResults in MappedResultsByBucket)
530						{
531							var input = mappedResults.Select(x =>
532							{
533								sourceCount++;
534								return x;
535							});
536							foreach (var doc in parent.RobustEnumerationReduce(input.GetEnumerator(), ViewGenerator.ReduceDefinition, Actions, stats))
537							{
538								count++;
539								string reduceKeyAsString = ExtractReduceKey(ViewGenerator, doc);
540
541								switch (Level)
542								{
543									case 0:
544									case 1:
545                                        Actions.MapReduce.PutReducedResult(indexId, reduceKeyAsString, Level + 1, mappedResults.Key, mappedResults.Key / 1024, ToJsonDocument(doc));
546										Actions.General.MaybePulseTransaction();
547										break;
548									case 2:
549										WriteDocumentToIndex(doc, indexWriter, analyzer);
550										break;
551									default:
552										throw new InvalidOperationException("Unknown level: " + Level);
553								}
554								stats.ReduceSuccesses++;
555							}
556						}
557					}
558					catch (Exception e)
559					{
560						if (Level == 2)
561						{
562							batchers.ApplyAndIgnoreAllErrors(
563								ex =>
564								{
565									logIndexing.WarnException("Failed to notify index update trigger batcher about an error", ex);
566                                    Context.AddError(indexId, parent.indexDefinition.Name, null, ex.Message, "AnErrorOccured Trigger");
567								},
568								x => x.AnErrorOccured(e));
569						}
570						throw;
571					}
572					finally
573					{
574						if (Level == 2)
575						{
576							batchers.ApplyAndIgnoreAllErrors(
577								e =>
578								{
579									logIndexing.WarnException("Failed to dispose on index update trigger", e);
580                                    Context.AddError(indexId, parent.indexDefinition.Name, null, e.Message, "Dispose Trigger");
581								},
582								x => x.Dispose());
583						}
584						parent.BatchCompleted("Current Reduce #" + Level);
585					}
586
587					return new IndexedItemsInfo(null)
588					{
589						ChangedDocs = count + ReduceKeys.Count
590					};
591				});
592				parent.AddindexingPerformanceStat(new IndexingPerformanceStats
593				{
594					OutputCount = count,
595					ItemsCount = sourceCount,
596					InputCount = inputCount,
597					Duration = sw.Elapsed,
598					Operation = "Reduce Level " + Level,
599					Started = start
600				});
601                logIndexing.Debug(() => string.Format("Reduce resulted in {0} entries for {1} for reduce keys: {2}", count, indexId, string.Join(", ", ReduceKeys)));
602			}
603
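			// Builds the Lucene document for a final (level 2) reduce output and hands it to the index writer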
604			private void WriteDocumentToIndex(object doc, RavenIndexWriter indexWriter, Analyzer analyzer)
605			{
606				float boost;
607				List<AbstractField> fields;
608			    try
609			    {
610			        fields = GetFields(doc, out boost).ToList();
611			    }
612			    catch (Exception e)
613			    {
614                    Context.AddError(indexId,
615                        parent.PublicName,
616                        TryGetDocKey(doc),
617                        e.Message,
618                        "Reduce"
619                        );
 620			        logIndexing.WarnException("Could not get fields during reduce for " + parent.PublicName, e);
621			        return;
622			    }
623
624				string reduceKeyAsString = ExtractReduceKey(ViewGenerator, doc);
625				reduceKeyField.SetValue(reduceKeyAsString);
626				reduceValueField.SetValue(ToJsonDocument(doc).ToString(Formatting.None));
627				luceneDoc.GetFields().Clear();
628				luceneDoc.Boost = boost;
629				luceneDoc.Add(reduceKeyField);
630				luceneDoc.Add(reduceValueField);
631				foreach (var field in fields)
632				{
633					luceneDoc.Add(field);
634				}
635
636				batchers.ApplyAndIgnoreAllErrors(
637					exception =>
638					{
639						logIndexing.WarnException(
 640							string.Format("Error executing OnIndexEntryCreated trigger for index '{0}', key: '{1}'",
641                                          indexId, reduceKeyAsString),
642							exception);
643                        Context.AddError(indexId, parent.PublicName, reduceKeyAsString, exception.Message, "OnIndexEntryCreated Trigger");
644					},
645					trigger => trigger.OnIndexEntryCreated(reduceKeyAsString, luceneDoc));
646
647				parent.LogIndexedDocument(reduceKeyAsString, luceneDoc);
648
649				parent.AddDocumentToIndex(indexWriter, luceneDoc, analyzer);
650			}
651
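			// Deletes the previously indexed entries for this batch's reduce keys before the re-reduced results are written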
652			private void RemoveExistingReduceKeysFromIndex(RavenIndexWriter indexWriter)
653			{
654				foreach (var reduceKey in ReduceKeys)
655				{
656					var entryKey = reduceKey;
657				    parent.InvokeOnIndexEntryDeletedOnAllBatchers(batchers, new Term(Constants.ReduceKeyFieldName, entryKey));
658					indexWriter.DeleteDocuments(new Term(Constants.ReduceKeyFieldName, entryKey));
659				}
660			}
661		}
662	}
663}