[mono.git] / mcs / class / System.Threading.Tasks.Dataflow / System.Threading.Tasks.Dataflow / BatchBlock.cs
// BatchBlock.cs
//
// Copyright (c) 2011 Jérémie "garuma" Laval
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//


using System;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Collections.Concurrent;

namespace System.Threading.Tasks.Dataflow
{
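	// BatchBlock<T> groups the elements posted to it into arrays of
	// batchSize items and offers those arrays to linked targets, buffering
	// completed batches in an outgoing queue when no target is linked.
	//
	// Illustrative usage sketch (assumes the standard DataflowBlock.Post
	// and DataflowBlock.Receive extension methods are available):
	//
	//   var batcher = new BatchBlock<int> (3);
	//   batcher.Post (1);
	//   batcher.Post (2);
	//   batcher.Post (3);
	//   int[] batch = batcher.Receive ();  // { 1, 2, 3 }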
	public sealed class BatchBlock<T> : IPropagatorBlock<T, T[]>, ITargetBlock<T>, IDataflowBlock, ISourceBlock<T[]>, IReceivableSourceBlock<T[]>
	{
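		// messageQueue holds the raw elements that have been accepted but
		// not yet grouped, batchCount tracks how many of them are pending,
		// and outgoing buffers completed batches until a target takes them.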
		static readonly DataflowBlockOptions defaultOptions = new DataflowBlockOptions ();

		CompletionHelper compHelper = CompletionHelper.GetNew ();
		BlockingCollection<T> messageQueue = new BlockingCollection<T> ();
		MessageBox<T> messageBox;
		MessageVault<T[]> vault;
		DataflowBlockOptions dataflowBlockOptions;
		readonly int batchSize;
		int batchCount;
		MessageOutgoingQueue<T[]> outgoing;
		TargetBuffer<T[]> targets = new TargetBuffer<T[]> ();
		DataflowMessageHeader headers = DataflowMessageHeader.NewValid ();

		public BatchBlock (int batchSize) : this (batchSize, defaultOptions)
		{

		}

		public BatchBlock (int batchSize, DataflowBlockOptions dataflowBlockOptions)
		{
			if (dataflowBlockOptions == null)
				throw new ArgumentNullException ("dataflowBlockOptions");

			this.batchSize = batchSize;
			this.dataflowBlockOptions = dataflowBlockOptions;
			this.messageBox = new PassingMessageBox<T> (messageQueue, compHelper, () => outgoing.IsCompleted, BatchProcess, dataflowBlockOptions);
			this.outgoing = new MessageOutgoingQueue<T[]> (compHelper, () => messageQueue.IsCompleted);
			this.vault = new MessageVault<T[]> ();
		}

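		// Entry point used by linked sources (and the Post extension method);
		// the work is delegated to the message box created in the constructor,
		// which was given messageQueue and the BatchProcess callback.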
		public DataflowMessageStatus OfferMessage (DataflowMessageHeader messageHeader,
		                                           T messageValue,
		                                           ISourceBlock<T> source,
		                                           bool consumeToAccept)
		{
			return messageBox.OfferMessage (this, messageHeader, messageValue, source, consumeToAccept);
		}

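		// Registers a new target and immediately tries to hand it any batches
		// that are already waiting in the outgoing queue.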
		public IDisposable LinkTo (ITargetBlock<T[]> target, bool unlinkAfterOne)
		{
			var result = targets.AddTarget (target, unlinkAfterOne);
			outgoing.ProcessForTarget (target, this, false, ref headers);

			return result;
		}

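		// The ConsumeMessage/ReserveMessage/ReleaseReservation part of the
		// ISourceBlock<T[]> contract is handled by the shared message vault.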
		public T[] ConsumeMessage (DataflowMessageHeader messageHeader, ITargetBlock<T[]> target, out bool messageConsumed)
		{
			return vault.ConsumeMessage (messageHeader, target, out messageConsumed);
		}

		public void ReleaseReservation (DataflowMessageHeader messageHeader, ITargetBlock<T[]> target)
		{
			vault.ReleaseReservation (messageHeader, target);
		}

		public bool ReserveMessage (DataflowMessageHeader messageHeader, ITargetBlock<T[]> target)
		{
			return vault.ReserveMessage (messageHeader, target);
		}

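		// Attempts to take a single completed batch from the outgoing queue,
		// optionally restricted by the supplied filter.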
		public bool TryReceive (Predicate<T[]> filter, out T[] item)
		{
			return outgoing.TryReceive (filter, out item);
		}

		public bool TryReceiveAll (out IList<T[]> items)
		{
			return outgoing.TryReceiveAll (out items);
		}

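		// Emits whatever has accumulated so far as a (possibly short) batch
		// without waiting for batchSize elements. The compare-exchange loop
		// atomically claims the pending count so that concurrent triggers
		// cannot emit the same elements twice.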
		public void TriggerBatch ()
		{
			int earlyBatchSize;
			do {
				earlyBatchSize = batchCount;
				if (earlyBatchSize == 0)
					return;
			} while (Interlocked.CompareExchange (ref batchCount, 0, earlyBatchSize) != earlyBatchSize);

			MakeBatch (targets.Current, earlyBatchSize);
		}

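		// Callback handed to the message box in the constructor. Each call
		// bumps the running element count; once it reaches a multiple of
		// batchSize the counter is reset and a full batch is assembled.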
		// TODO: there can be out-of-order processing of message elements if two collections
		// are triggered and work side by side. See if it's a problem or not.
		void BatchProcess ()
		{
			ITargetBlock<T[]> target = targets.Current;
			int current = Interlocked.Increment (ref batchCount);

			if (current % batchSize != 0)
				return;

			Interlocked.Add (ref batchCount, -current);

			MakeBatch (target, batchSize);
		}

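		// Dequeues size elements into an array and either offers it straight
		// to the current target or parks it in the outgoing queue; any batches
		// still pending in that queue are then re-offered to the current target.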
		void MakeBatch (ITargetBlock<T[]> target, int size)
		{
			T[] batch = new T[size];
			for (int i = 0; i < size; ++i)
				messageQueue.TryTake (out batch[i]);

			if (target == null)
				outgoing.AddData (batch);
			else
				target.OfferMessage (headers.Increment (), batch, this, false);

			if (!outgoing.IsEmpty && targets.Current != null)
				outgoing.ProcessForTarget (targets.Current, this, false, ref headers);
		}

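		// Requests completion: no further elements will be accepted, which is
		// signalled through the message box.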
		public void Complete ()
		{
			messageBox.Complete ();
		}

		public void Fault (Exception ex)
		{
			compHelper.Fault (ex);
		}

		public Task Completion {
			get {
				return compHelper.Completion;
			}
		}

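		// Number of completed batches currently buffered in the outgoing queue.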
		public int OutputCount {
			get {
				return outgoing.Count;
			}
		}
	}
}