C# Client Library
A C# Client Library for the AnalyzeRe REST API
Test_OptimizationView.cs
using System;
using System.Collections.Generic;
using System.Linq;

using AnalyzeRe;
using AnalyzeRe.APITypes;
using AnalyzeRe.Layers;
using AnalyzeRe.Metrics;

#if MSTEST
using Microsoft.VisualStudio.TestTools.UnitTesting;
#elif NUNIT
using NUnit.Framework;
using TestClass = NUnit.Framework.TestFixtureAttribute;
using TestMethod = NUnit.Framework.TestAttribute;
using TestCategory = NUnit.Framework.CategoryAttribute;
#endif

namespace AnalyzeReTesting.Tests.Model
{
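    // Collection-level tests for the OptimizationViews endpoint (API.OptimizationViews);
    // these do not require a particular OptimizationView instance to be defined.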
    [TestClass]
    public sealed class TestSuite_OptimizationViewCollection : BaseResourceCollectionTestSuite<OptimizationView>
    {
        protected override bool PUT_Allowed => false;
        protected override IResourceCollection<OptimizationView> collection_source => API.OptimizationViews;

        public TestSuite_OptimizationViewCollection()
        {
            // Increase the request timeout for OptimizationView requests, since they can be slow.
            AddCommonTestInitializeAction(() =>
                API.DefaultRequestTimeoutCollections = EnvironmentSettings.OPTIMIZATION_TIMEOUTS);
        }
    }

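    // Resource-level tests for individual OptimizationView resources: POST validation of each
    // attribute, retrieval of results and candidates, and sensitivity analysis.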
    [TestClass]
    public sealed class TestSuite_OptimizationView : BaseImmutableResourceTestSuite<OptimizationView>
    {
        #region Set Up and Configuration
        private const string TypeName = "OptimizationView";
        protected override bool DELETE_Allowed => true;

        protected override IInjectableResource<OptimizationView> TestInjectableResource =>
            Samples.OptimizationView_Simulated;

        public TestSuite_OptimizationView()
        {
            // Increase the request timeout for OptimizationView requests, since they can be slow.
            AddCommonTestInitializeAction(() =>
                API.DefaultRequestTimeout = EnvironmentSettings.OPTIMIZATION_TIMEOUTS);
        }
        #endregion Set Up and Configuration

        [TestMethod, TestCategory(TypeName)]
        public override void Test_Resource_GET_AllPropertiesRecognized()
        {
            // In the case of the OptimizationView, the properties may keep changing even after
            // the resource is posted, as the OptimizationView's job gets "run". The
            // Get_AllPropertiesRecognized method requires making two different GETs where all
            // property values are identical, so we must ensure the OptimizationView is done
            // running before we run this test.
            TestResource_Existing.PollUntilReady();
            base.Test_Resource_GET_AllPropertiesRecognized();
        }

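        // The POST tests below verify server-side validation of each OptimizationView attribute:
        // analysis_profile, iterations, population_size, custom_parameters, domains and target_currency.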
        #region POST
        #region analysis_profile
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_AnalysisProfile_Null()
        {
            POST_Attribute_Null(l => l.analysis_profile);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_AnalysisProfile_NullId()
        {
            POST_Reference_NullId(l => l.analysis_profile);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_AnalysisProfile_EmptyStringId()
        {
            POST_Reference_EmptyStringId(l => l.analysis_profile);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_AnalysisProfile_NonExistantId()
        {
            POST_Reference_NonExistantId(l => l.analysis_profile);
        }
        #endregion analysis_profile

        #region iterations
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Iterations_Zero()
        {
            POST_WithValue(l => l.iterations, 0, false);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Iterations_Minimum()
        {
            // Min iterations is 1
            POST_WithValue(l => l.iterations, 1, true);
        }
        // Test takes too long to run.
        //[TestMethod, TestCategory(TypeName)]
        //public void Test_OptimizationView_POST_Iterations_Maximum()
        //{
        //    // Max iterations is 10,000
        //    POST_WithValue(l => l.iterations, 10000, shouldSucceed: true);
        //}
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Iterations_AboveMaximum()
        {
            POST_WithValue(l => l.iterations, 10001, false);
        }
        #endregion iterations

        #region population_size
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_PopulationSize_Zero()
        {
            POST_WithValue(l => l.population_size, 0, false);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_PopulationSize_Minimum()
        {
            // Min population size is 1
            POST_WithValue(l => l.population_size, 1, true);
        }
        // Test takes too long to run.
        //[TestMethod, TestCategory(TypeName)]
        //public void Test_OptimizationView_POST_PopulationSize_Maximum()
        //{
        //    // Max population size is 10,000
        //    POST_WithValue(l => l.population_size, 10000, shouldSucceed: true);
        //}
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_PopulationSize_AboveMaximum()
        {
            POST_WithValue(l => l.population_size, 10001, false);
        }
        #endregion population_size

        #region custom_parameters
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_CustomParameters_Null()
        {
            // Task #1162 (in Redmine) describes why requiring custom_parameters is overly strict
            POST_Attribute_Null(l => l.custom_parameters);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_CustomParameters_Empty()
        {
            // The optimization currently requires several custom parameters, so an empty
            // dictionary should fail.
            POST_WithValue(l => l.custom_parameters,
                new Dictionary<string, object>(), false);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_CustomParameters_AdditionalValues()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Test1"] = "TestValue";
            modified.custom_parameters["Test2"] = 2;
            modified.custom_parameters["Test3"] = 3.33;
            GenericTest.POST_ValidResource(modified);
        }

        #region Discretization
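        // The tests below probe the accepted range of the "Discretization" parameter:
        // 0 and 0.1 are accepted, while -0.001 and 0.1001 are rejected.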
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Discretization_Negative()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Discretization"] = -0.001;
            GenericTest.POST_InvalidResource_Fails(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Discretization_Zero()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Discretization"] = 0;
            GenericTest.POST_ValidResource(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Discretization_Maximum()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Discretization"] = 0.1;
            GenericTest.POST_ValidResource(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Discretization_AboveMaximum()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Discretization"] = 0.1001;
            GenericTest.POST_InvalidResource_Fails(modified);
        }
        #endregion Discretization

        #region Return_Period
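        // The "Objective 2 - Return Period" parameter must be positive: 1e-15 and 1 are
        // accepted, while 0 and 2.001 are rejected.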
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_ReturnPeriod_Zero()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Objective 2 - Return Period"] = 0;
            GenericTest.POST_InvalidResource_Fails(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_ReturnPeriod_VerySmall()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Objective 2 - Return Period"] = 1e-15;
            GenericTest.POST_ValidResource(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_ReturnPeriod_One()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Objective 2 - Return Period"] = 1;
            GenericTest.POST_ValidResource(modified);
        }
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_ReturnPeriod_AboveMaximum()
        {
            OptimizationView modified = TestResource;
            modified.custom_parameters["Objective 2 - Return Period"] = 2.001;
            GenericTest.POST_InvalidResource_Fails(modified);
        }
        #endregion Return_Period
        #endregion custom_parameters

        #region domain
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_Null()
        {
            POST_Attribute_Null(l => l.domains);
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_Empty()
        {
            POST_ListAttribute_Empty(l => l.domains);
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_NonExistantLayer()
        {
            DomainEntry _withInvalidLayer = new DomainEntry
            {
                layer = new Reference<ILayer>(Samples.Valid_NonExistant_UUID),
                min = 0,
                max = 1
            };
            POST_WithValue(l => l.domains,
                new List<DomainEntry> { _withInvalidLayer }, false);
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_MinGreaterThanMax()
        {
            OptimizationView modified = TestResource;
            modified.domains.First().min = 0.5;
            modified.domains.First().max = 0.4;
            GenericTest.POST_InvalidResource_Fails(modified);
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_ExtremeMinAndMax()
        {
            OptimizationView modified = TestResource;
            modified.domains.First().min = Int32.MinValue;
            modified.domains.First().max = Int32.MaxValue;
            GenericTest.POST_ValidResource(modified);
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_AllowExclude()
        {
            OptimizationView modified = TestResource;
            modified.domains.First().allow_exclude = true;
            GenericTest.POST_ValidResource(modified);
        }

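        // Layer types that cannot be referenced by an optimization domain entry; POSTing a
        // domain that uses one of these layer types is expected to fail (see the tests below).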
        public static readonly HashSet<Type> UnsupportedOptimizationLayerTypes = new HashSet<Type>
        {
            typeof(Filter), typeof(FixedRateCurrencyConverter), typeof(IndustryLossWarranty),
            typeof(Nested), typeof(SurplusShare), typeof(LossRank), typeof(NoClaimsBonus),
            typeof(ValueAllocator), typeof(BackAllocatedLayer)
        };

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_SupportedLayerTypes()
        {
            IEnumerable<IInjectableResource<ILayer>> supportedTypes =
                Samples.AllLayerTypesTestList.Where(t => !UnsupportedOptimizationLayerTypes
                    .Contains(t.GetType().GetGenericArguments()[0]));
            foreach (IInjectableResource<ILayer> layer in supportedTypes)
            {
                OptimizationView modified = TestResource;
                modified.domains.Add(new DomainEntry { layer = layer.AsReference, min = 0, max = 1 });
                POST_ThenDoAction(modified, optimizationView =>
                {
                    optimizationView = WaitForOptimizationResult(optimizationView);
                    Assert.AreEqual(TaskStatus.Success, optimizationView.status);
                });
            }
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Domains_UnsupportedLayerTypes()
        {
            IEnumerable<IInjectableResource<ILayer>> unsupportedTypes =
                Samples.AllLayerTypesTestList.Where(t => UnsupportedOptimizationLayerTypes
                    .Contains(t.GetType().GetGenericArguments()[0]));
            foreach (IInjectableResource<ILayer> layer in unsupportedTypes)
            {
                OptimizationView modified = TestResource;
                modified.domains.Add(new DomainEntry { layer = layer.AsReference, min = 0, max = 1 });
                GenericTest.POST_InvalidResource_Fails(modified);
            }
        }
        #endregion domain

        #region CreatedAndModified
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_Sets_CreatedAndModified()
        {
            POST_ThenDoAction(TestResource, posted =>
            {
                Assert.IsNotNull(posted.created, "Expected a 'created' date to be assigned.");
                Assert.IsNotNull(posted.modified, "Expected a 'modified' date to be assigned.");
            });
        }
        #endregion CreatedAndModified

        #region target_currency
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_TargetCurrency_Null()
        {
            POST_Attribute_Null(l => l.target_currency);
        }
        #endregion target_currency
        #endregion POST

        #region Results
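        // The tests in this region wait for the optimization job to finish and then verify that
        // results are reachable both through helper methods (GetInitialPortfolioMetrics,
        // GetCandidateResult, GetCandidatePortfolioView, ...) and through the sub-resource
        // endpoints (candidates, initial_portfolio_result, csv).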
        private static readonly PollingOptions OptimizationPolling = new PollingOptions(
            maxPollInterval: EnvironmentSettings.POLLING_INTERVAL,
            maxWaitTime: EnvironmentSettings.OPTIMIZATION_TIMEOUTS
        );

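        // Polls the given OptimizationView until its optimization job completes, failing the
        // test with a descriptive message if the job does not finish in time.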
        private static OptimizationView WaitForOptimizationResult(OptimizationView optimization)
        {
            // TODO: Currently so long because the OE can get hung up (Story #1631)
            // The result should only take about 3 seconds, but allow up to 20 for now, since the
            // first run always takes longer (the Julia code must compile, or something similar).
            try
            {
                return optimization.PollUntilReady(OptimizationPolling);
            }
            catch (NotWaitingException ex)
            {
                string message = $"Waited {ex.TimeWaited} seconds for " +
                    "the optimization job to complete, but it never did.\n";
                Exception toPrint = ex;
                while (toPrint != null)
                {
                    message = message + Environment.NewLine + toPrint;
                    toPrint = toPrint.InnerException;
                }
                Assert.Fail(message);
                return null;
            }
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_Results()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                // Once the optimization is done, the initial portfolio metrics should be present.
                OptimizationResult initialPortfolioMetrics = optimizationView.GetInitialPortfolioMetrics();
                Assert.IsNotNull(initialPortfolioMetrics);
                // Our test optimization had 2 objectives, so we expect two metrics.
                Assert.AreEqual(2, initialPortfolioMetrics.objectives.Count);
                // Check that there is at least one candidate result.
                Assert.IsTrue(optimizationView.GetCandidateResultsCount() >= 1,
                    "Expected at least one candidate in the results.");
                // Get the first optimization result and demonstrate that it's not null.
                Candidate result = optimizationView.GetCandidateResult(0);
                Assert.IsNotNull(result.objectives, "First candidate's objective results were null");
            });
        }

        [TestMethod, TestCategory(TypeName)]
        [Obsolete("2018-04-23: This tests an obsolete feature that will be removed in the future.")]
        public void Test_OptimizationView_Results_Legacy()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                // Once the optimization is done, the initial portfolio metrics should be present.
                optimizationView = optimizationView.Get();
                Assert.IsNotNull(optimizationView.initial_portfolio_metrics);
                // Our test optimization had 2 objectives, so we expect two metrics.
                Assert.AreEqual(2, optimizationView.initial_portfolio_metrics.Count);
                // Get the (legacy) results endpoint and demonstrate that it's not null.
                CandidateResultsList result = optimizationView.result.Get();
                Assert.IsNotNull(result, "optimization_view result was null");
                Assert.IsTrue(result.candidates.Count >= 1, "Expected at least one candidate");
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_InitialPortfolioResult()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                OptimizationResult initialPortfolio = optimizationView.GetInitialPortfolioMetrics();
                Assert.IsNotNull(initialPortfolio);
                // Our test optimization had 2 objectives, so we expect two metrics.
                Assert.AreEqual(2, initialPortfolio.objectives.Count);
                // Our test optimization had 3 constraints, so we expect three metrics.
                Assert.AreEqual(3, initialPortfolio.constraints.Count);
                // Our initial portfolio is not feasible, so we expect this to be false.
                Assert.IsFalse(initialPortfolio.feasible);
                // Test that we can also get the initial portfolio from the endpoint object.
                OptimizationResult fromEndpoint = optimizationView.initial_portfolio_result.Get();
                Assert.IsNotNull(fromEndpoint);
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_Candidates()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                // Assert that we can get a candidate by paging the sub-collection endpoint.
                ICollectionResponse<Candidate> result = optimizationView.candidates.List(0, 1);
                long collectionCount = result.meta.total_count;
                Assert.IsTrue(collectionCount >= 1, "Expected at least one candidate");
                // Assert that we can also get the candidate by its id.
                Candidate first = optimizationView.candidates.GetItem(result.items.First().id);
                Assert.IsNotNull(first.index, "Expected the candidate index to be set.");
                Assert.AreEqual(result.items.First().index, first.index);

                // Assert that we can also get a candidate via helper methods on the optimization.
                Assert.AreEqual(collectionCount, optimizationView.GetCandidateResultsCount());
                Candidate fromParent = optimizationView.GetCandidateResult(first.index.Value);
                Assert.AreEqual(first.index, fromParent.index);
                Assert.IsNotNull(fromParent.optimization_view);
                Assert.IsNotNull(fromParent.parameterization);
                Assert.IsNotNull(fromParent.objectives);
                Assert.IsNotNull(fromParent.constraints);
                Assert.IsNotNull(fromParent.feasible);
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_Candidate_PortfolioView()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                ICollectionResponse<Candidate> result = optimizationView.candidates.List(0, 1);
                Assert.IsTrue(result.meta.total_count >= 1, "Expected at least one candidate");
                // Get the first candidate by its id.
                Candidate first = optimizationView.candidates.GetItem(result.items.First().id);
                // Assert that we can get the portfolio_view for a candidate.
                PortfolioView candidate_portfolio_view = first.portfolio_view.Get();
                Assert.IsNotNull(candidate_portfolio_view.id);
                // Assert that we can get metrics for this portfolio_view.
                TailMetrics metrics = candidate_portfolio_view.tail_metrics_blocking(1,
                    MetricsOptions.Default, OptimizationPolling);
                Assert.IsNotNull(metrics);
                Assert.AreEqual(1, metrics.context.probability);

                // Assert that we can also get the candidate portfolio_view using the helper method.
                PortfolioView fromHelper = optimizationView.GetCandidatePortfolioView(0);
                Assert.IsNotNull(fromHelper);
                // It should be identical to the one retrieved above.
                Assert.AreEqual(candidate_portfolio_view.id, fromHelper.id);
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_CSV()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                string result = optimizationView.csv.Get();
                Assert.IsFalse(String.IsNullOrWhiteSpace(result), "optimization_view csv result was empty");
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_POST_MismatchedLayerCurrency()
        {
            // ARE-4557: If even one layer or loss set uses a different currency,
            // the POST is rejected.
            POST_WithValue(ov => ov.domains, new List<DomainEntry>
            {
                new DomainEntry
                {
                    layer = Samples.Layer_CatXL.Unposted.Change(l => l.limit,
                        new MonetaryUnit(100, "EUR")).Post().ToReference(),
                    min = 0.0,
                    max = 1.0
                }
            }, false);
        }
        #endregion Results

        #region Sensitivity Analysis
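        // The sensitivity analysis returns one entry per domain layer, each with summary
        // statistics (min/max/mean, normalized deviation measures) and a 20-bin histogram.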
        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_SensitivityAnalysis()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis();

                Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
                Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
                Assert.IsTrue(sa.sensitivities.Count > 0,
                    "Number of sensitivities for the test should be at least 1");
                Assert.AreEqual(sa.sensitivities.Count, optimizationView.domains.Count,
                    "Number of sensitivities should be equal to the number of domains");
                foreach (LayerSensitivity s in sa.sensitivities)
                {
                    Assert.IsNotNull(s.min, "Min value should exist");
                    Assert.IsNotNull(s.max, "Max value should exist");
                    Assert.IsNotNull(s.mean, "Mean value should exist");
                    Assert.IsNotNull(s.ref_id, "Reference ID should exist");
                    Assert.IsNotNull(s.normalized_standard_deviation, "Standard deviation should exist");
                    Assert.IsNotNull(s.normalized_interquartile_range, "Interquartile range should exist");
                    Assert.IsNotNull(s.hist, "Histogram should exist");
                    Assert.AreEqual(20, s.hist.Count, "Histogram should have 20 bins");
                }
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_SensitivityAnalysis_SpecificCandidates()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis(new List<int> { 0 });

                Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
                Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
                Assert.IsTrue(sa.sensitivities.Count > 0,
                    "Number of sensitivities for the test should be at least 1");
                Assert.AreEqual(sa.sensitivities.Count, optimizationView.domains.Count,
                    "Number of sensitivities should be equal to the number of domains");
                foreach (LayerSensitivity s in sa.sensitivities)
                {
                    Assert.IsNotNull(s.min, "Min value should exist");
                    Assert.IsNotNull(s.max, "Max value should exist");
                    Assert.IsNotNull(s.mean, "Mean value should exist");
                    Assert.IsNotNull(s.ref_id, "Reference ID should exist");
                    Assert.IsNotNull(s.normalized_standard_deviation, "Standard deviation should exist");
                    Assert.IsNotNull(s.normalized_interquartile_range, "Interquartile range should exist");
                    Assert.IsNotNull(s.hist, "Histogram should exist");
                    Assert.AreEqual(20, s.hist.Count, "Histogram should have 20 bins");
                }
            });
        }

        [TestMethod, TestCategory(TypeName)]
        public void Test_OptimizationView_SensitivityAnalysis_NonExistentCandidate()
        {
            POST_ThenDoAction(TestResource, optimizationView =>
            {
                optimizationView = WaitForOptimizationResult(optimizationView);
                // We do not expect the view to have a candidate with index 9999099,
                // so no sensitivities should be returned for it.
                SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis(new List<int> { 9999099 });

                Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
                Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
                Assert.AreEqual(sa.sensitivities.Count, 0, "Number of sensitivities for this test should be 0");
            });
        }
        #endregion Sensitivity Analysis
    }
}
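For reference, the helper calls exercised by these tests compose into a simple client workflow for consuming optimization results. The sketch below is not part of the test file: the `optimizationView` variable and the use of `Post()` directly on an OptimizationView are assumptions based on how the tests build and submit resources; the remaining calls (`PollUntilReady`, `Get`, `GetInitialPortfolioMetrics`, `GetCandidateResultsCount`, `GetCandidateResult`, `GetCandidatePortfolioView`, `csv.Get`) are the ones demonstrated above.

// Hypothetical usage sketch (assumes "optimizationView" is a fully populated OptimizationView).
OptimizationView posted = optimizationView.Post();   // submit the optimization job (assumed Post() helper)
posted.PollUntilReady();                             // block until the optimization job completes
posted = posted.Get();                               // refresh the resource now that the job is done

// Initial portfolio metrics (objectives, constraints, feasibility).
OptimizationResult initial = posted.GetInitialPortfolioMetrics();

// Candidate solutions, via the helper methods shown in the tests.
var candidateCount = posted.GetCandidateResultsCount();
Candidate best = posted.GetCandidateResult(0);
PortfolioView bestPortfolio = posted.GetCandidatePortfolioView(0);

// Raw CSV export of the optimization results.
string csv = posted.csv.Get();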