C# Client Library
A C# Client Library for the AnalyzeRe REST API
Loading...
Searching...
No Matches
Test_OptimizationView.cs
Go to the documentation of this file.
1using System;
2using System.Collections.Generic;
3using System.Linq;
4
5using AnalyzeRe;
12
13#if MSTEST
14using Microsoft.VisualStudio.TestTools.UnitTesting;
15#elif NUNIT
16using NUnit.Framework;
17using TestClass = NUnit.Framework.TestFixtureAttribute;
18using TestMethod = NUnit.Framework.TestAttribute;
19using TestCategory = NUnit.Framework.CategoryAttribute;
20#endif
21
23{
24 [TestClass]
26 {
        /// <summary>PUT requests are not permitted for this resource type, so skip the PUT tests.</summary>
        protected override bool PUT_Allowed => false;
29
31 {
32 // Increase the request timeout for OptimizationView requests since they can be slow.
33 AddCommonTestInitializeAction(() =>
36 }
37 }
38
39 [TestClass]
40 public sealed class TestSuite_OptimizationView : BaseImmutableResourceTestSuite<OptimizationView>
41 {
42 #region Set Up and Configuration
        // Category name applied (via TestCategory) to every test in this suite.
        private const string TypeName = "OptimizationView";
        // OptimizationView resources support DELETE, so run the generic DELETE tests.
        protected override bool DELETE_Allowed => true;
45
48
52 WaitForOptimizationResult(posted);
53
55 {
56 // Increase the request timeout for OptimizationView requests since they can be slow.
57 AddCommonTestInitializeAction(() =>
60 }
61 #endregion Set Up and Configuration
62
63 [TestMethod, TestCategory(TypeName)]
65 {
66 // In the case of the OptimizationView - even after the resource is posted the properties
67 // may change as the OptimizationView's job gets "run". The Get_AllPropertiesRecognized
68 // method requires making two different GETs where all property values are identical -
69 // hence we must ensure the OptimizationView is done running before we run this test.
70 TestResource_Existing.PollUntilReady(OptimizationPolling);
71 base.Test_Resource_GET_AllPropertiesRecognized();
72 }
73
74 #region POST
75 #region analysis_profile
76 [TestMethod, TestCategory(TypeName)]
78 {
79 POST_Attribute_Null(l => l.analysis_profile);
80 }
81 [TestMethod, TestCategory(TypeName)]
86 [TestMethod, TestCategory(TypeName)]
91 [TestMethod, TestCategory(TypeName)]
96 #endregion analysis_profile
97
98 #region iterations
99 [TestMethod, TestCategory(TypeName)]
101 {
102 POST_WithValue(l => l.iterations, 0, false);
103 }
104 [TestMethod, TestCategory(TypeName)]
106 {
107 // Min iterations is 1
108 POST_WithValue(l => l.iterations, 1, true);
109 }
110 // Test takes too long to run.
111 //[TestMethod, TestCategory(TypeName)]
112 //public void Test_OptimizationView_POST_Iterations_Maximum()
113 //{
114 // // Max iterations is 10,000
115 // POST_WithValue(l => l.iterations, 10000, shouldSucceed: true);
116 //}
117 [TestMethod, TestCategory(TypeName)]
119 {
120 POST_WithValue(l => l.iterations, 10001, false);
121 }
122 #endregion iterations
123
124 #region population_size
125 [TestMethod, TestCategory(TypeName)]
127 {
128 POST_WithValue(l => l.population_size, 0, false);
129 }
130 [TestMethod, TestCategory(TypeName)]
132 {
133 // Min population size is 1
134 Skip.Indefinitely("ARE-4575");
135 POST_WithValue(l => l.population_size, 1, true);
136 }
137 // Test takes too long to run.
138 //[TestMethod, TestCategory(TypeName)]
139 //public void Test_OptimizationView_POST_PopulationSize_Maximum()
140 //{
141 // // Max population size is 10,000
142 // POST_WithValue(l => l.population_size, 10000, shouldSucceed: true);
143 //}
144 [TestMethod, TestCategory(TypeName)]
146 {
147 POST_WithValue(l => l.population_size, 10001, false);
148 }
149 #endregion population_size
150
151 #region custom_parameters
152 [TestMethod, TestCategory(TypeName)]
154 {
155 // Task #1162 (in Redmine) describes why requiring custom_parameters is overly strict
156 POST_Attribute_Null(l => l.custom_parameters);
157 }
158 [TestMethod, TestCategory(TypeName)]
160 {
161 // Currently Requires several custom parameters, so fail
162 POST_WithValue(l => l.custom_parameters,
163 new Dictionary<string, object>(), false);
164 }
165 [TestMethod, TestCategory(TypeName)]
167 {
168 OptimizationView modified = TestResource;
169 modified.custom_parameters["Test1"] = "TestValue";
170 modified.custom_parameters["Test2"] = 2;
171 modified.custom_parameters["Test3"] = 3.33;
172 GenericTest.POST_ValidResource(modified);
173 }
174
175 #region Discretization
176 [TestMethod, TestCategory(TypeName)]
178 {
179 OptimizationView modified = TestResource;
180 modified.custom_parameters["Discretization"] = -0.001;
181 GenericTest.POST_InvalidResource_Fails(modified);
182 }
183 [TestMethod, TestCategory(TypeName)]
185 {
186 OptimizationView modified = TestResource;
187 modified.custom_parameters["Discretization"] = 0;
188 GenericTest.POST_ValidResource(modified);
189 }
190 [TestMethod, TestCategory(TypeName)]
192 {
193 OptimizationView modified = TestResource;
194 // Discretization can be no larger than the smallest difference
195 // between min and max share (excluding locked layers)
196 double maxDiscretization = modified.domains.Where(d => !d.min.Equals(d.max)).Min(d => d.max - d.min);
197 modified.custom_parameters["Discretization"] = maxDiscretization;
198 GenericTest.POST_ValidResource(modified);
199
200 // Now try exceeding this value
201 modified.custom_parameters["Discretization"] = maxDiscretization + 0.01;
202 GenericTest.POST_InvalidResource_Fails(modified);
203 }
204 #endregion Discretization
205
206 #region Return_Period
207 [TestMethod, TestCategory(TypeName)]
209 {
210 OptimizationView modified = TestResource;
211 modified.custom_parameters["Objective 2 - Return Period"] = 0;
212 GenericTest.POST_InvalidResource_Fails(modified);
213 }
214 [TestMethod, TestCategory(TypeName)]
216 {
217 OptimizationView modified = TestResource;
218 modified.custom_parameters["Objective 2 - Return Period"] = 1e-15;
219 GenericTest.POST_ValidResource(modified);
220 }
221 [TestMethod, TestCategory(TypeName)]
223 {
224 OptimizationView modified = TestResource;
225 modified.custom_parameters["Objective 2 - Return Period"] = 1;
226 GenericTest.POST_ValidResource(modified);
227 }
228 [TestMethod, TestCategory(TypeName)]
230 {
231 OptimizationView modified = TestResource;
232 modified.custom_parameters["Objective 2 - Return Period"] = 2.001;
233 GenericTest.POST_InvalidResource_Fails(modified);
234 }
235 #endregion Return_Period
236 #endregion custom_parameters
237
238 #region domain
239 [TestMethod, TestCategory(TypeName)]
241 {
242 POST_Attribute_Null(l => l.domains);
243 }
244
245 [TestMethod, TestCategory(TypeName)]
247 {
248 POST_ListAttribute_Empty(l => l.domains);
249 }
250
251 [TestMethod, TestCategory(TypeName)]
253 {
255 {
257 min = 0,
258 max = 1
259 };
260 POST_WithValue(l => l.domains,
261 new List<DomainEntry> { _withInvalidLayer }, false);
262 }
263
264 [TestMethod, TestCategory(TypeName)]
266 {
267 OptimizationView modified = TestResource;
268 modified.domains.First().min = 0.5;
269 modified.domains.First().max = 0.4;
270 GenericTest.POST_InvalidResource_Fails(modified);
271 }
272
273 [TestMethod, TestCategory(TypeName)]
275 {
276 OptimizationView modified = TestResource;
277 modified.domains.First().min = Int32.MinValue;
278 modified.domains.First().max = Int32.MaxValue;
279 GenericTest.POST_ValidResource(modified);
280 }
281
282 [TestMethod, TestCategory(TypeName)]
284 {
285 OptimizationView modified = TestResource;
286 modified.domains.First().allow_exclude = true;
287 GenericTest.POST_ValidResource(modified);
288 }
289
298
299 [TestMethod, TestCategory(TypeName)]
301 {
304 !UnsupportedOptimizationLayerTypes.Contains(t.UnderlyingType));
306 {
307 OptimizationView modified = TestResource;
308 modified.domains.Add(new DomainEntry { layer = layer.AsReference, min = 0, max = 1 });
309 POST_ThenDoAction(modified, optimizationView =>
310 {
311 optimizationView = WaitForOptimizationResult(optimizationView);
312 Assert.AreEqual(TaskStatus.Success, optimizationView.status);
313 });
314 }
315 }
316
317 [TestMethod, TestCategory(TypeName)]
319 {
322 UnsupportedOptimizationLayerTypes.Contains(t.UnderlyingType));
324 {
325 OptimizationView modified = TestResource;
326 modified.domains.Add(new DomainEntry { layer = layer.AsReference, min = 0, max = 1 });
327 GenericTest.POST_InvalidResource_Fails(modified);
328 }
329 }
330 #endregion domain
331
332 #region CreatedAndModified
333 [TestMethod, TestCategory(TypeName)]
335 {
336 POST_ThenDoAction(TestResource, posted =>
337 {
338 Assert.IsNotNull(posted.created, "Expected a 'created' date to be assigned.");
339 Assert.IsNotNull(posted.modified, "Expected a 'modified' date to be assigned.");
340 });
341 }
342 #endregion CreatedAndModified
343
344 #region target_currency
345 [TestMethod, TestCategory(TypeName)]
347 {
348 POST_Attribute_Null(l => l.target_currency);
349 }
350 #endregion target_currency
351 #endregion POST
352
353 #region Results
355 maxPollInterval: EnvironmentSettings.POLLING_INTERVAL,
356 maxPollTotalTime: EnvironmentSettings.OPTIMIZATION_TIMEOUTS
357 );
358
361 private static OptimizationView WaitForOptimizationResult(OptimizationView optimization)
362 {
363 // TODO: Currently so long because the OE can get hung up (Story #1631)
364 // Result should only take about 3 seconds, but allow up to 20 for now since first run
365 // always takes longer as Julia code must compile or something...
366 try
367 {
368 return optimization.PollUntilReady(OptimizationPolling);
369 }
370 catch (NotWaitingException ex)
371 {
372 string message = $"Waited {ex.TimeWaited} seconds for " +
373 "the optimization job to complete, but it never did.\n";
374 Exception toPrint = ex;
375 while (toPrint != null)
376 {
377 message = message + Environment.NewLine + ex;
378 toPrint = toPrint.InnerException;
379 }
380 Assert.Fail(message);
381 return null;
382 }
383 }
384
385 [TestMethod, TestCategory(TypeName)]
387 {
388 POST_ThenDoAction(TestResource, optimizationView =>
389 {
390 optimizationView = WaitForOptimizationResult(optimizationView);
391 // Once the optimization is done, the initial portfolio metrics should be present.
392 OptimizationResult initialPortfolioMetrics = optimizationView.GetInitialPortfolioMetrics();
394 // Our test optimization had 2 objectives, so we expect two metrics.
395 Assert.AreEqual(2, initialPortfolioMetrics.objectives.Count);
396 // Check that there is at least one candidate result.
397 Assert.IsTrue(optimizationView.GetCandidateResultsCount() >= 1,
398 "Expected at least one candidate in the results.");
399 // Get the first optimization result and demonstrate that it's not null.
400 Candidate result = optimizationView.GetCandidateResult(0);
401 Assert.IsNotNull(result.objectives, "First candidate's objective results were null");
402 });
403 }
404
405 [TestMethod, TestCategory(TypeName)]
406 [Obsolete("2018-04-23: This tests an obsolete feature that will be removed in the future.")]
408 {
409 POST_ThenDoAction(TestResource, optimizationView =>
410 {
411 optimizationView = WaitForOptimizationResult(optimizationView);
412 // Once the optimization is done, the initial portfolio metrics should be present.
414 Assert.IsNotNull(optimizationView.initial_portfolio_metrics);
415 // Our test optimization had 2 objectives, so we expect two metrics.
416 Assert.AreEqual(2, optimizationView.initial_portfolio_metrics.Count);
417 // Get the (legacy) results endpoint and demonstrate that it's not null.
418 CandidateResultsList result = optimizationView.result.Get();
419 Assert.IsNotNull(result, "optimization_view result was null");
420 Assert.IsTrue(result.candidates.Count >= 1, "Expected at least one candidate");
421 });
422 }
423
424 [TestMethod, TestCategory(TypeName)]
426 {
427 POST_ThenDoAction(TestResource, optimizationView =>
428 {
429 optimizationView = WaitForOptimizationResult(optimizationView);
430 OptimizationResult initialPortfolio = optimizationView.GetInitialPortfolioMetrics();
431 Assert.IsNotNull(initialPortfolio);
432 // Our test optimization had 2 objectives, so we expect two metrics.
433 Assert.AreEqual(2, initialPortfolio.objectives.Count);
434 // Our test optimization had 3 constraints, so we expect three metrics.
435 Assert.AreEqual(3, initialPortfolio.constraints.Count);
436 // Our initial portfolio is not feasible, so we expect this to be false.
437 Assert.IsFalse(initialPortfolio.feasible);
438 // Test that we can also get the initial portfolio from the endpoint object
439 OptimizationResult fromEndpoint = optimizationView.initial_portfolio_result.Get();
440 Assert.IsNotNull(fromEndpoint);
441 });
442 }
443
444 [TestMethod, TestCategory(TypeName)]
446 {
447 POST_ThenDoAction(TestResource, optimizationView =>
448 {
449 optimizationView = WaitForOptimizationResult(optimizationView);
450 // Assert that we can get a candidate via paging the sub-collection endpoint
451 ICollectionResponse<Candidate> result = optimizationView.candidates.List(0, 1);
452 long collectionCount = result.meta.total_count;
453 Assert.IsTrue(collectionCount >= 1, "Expected at least one candidate");
454 // Assert that we can also get the candidate by its index.
455 Candidate first = optimizationView.candidates.GetItem(result.items.First().id);
456 Assert.IsNotNull(first.index, "Expected the candidate index to be set.");
457 Assert.AreEqual(result.items.First().index, first.index);
458
459 // Assert that we can also get a candidate via helper methods on the optimization
460 Assert.AreEqual(collectionCount, optimizationView.GetCandidateResultsCount());
461 Candidate fromParent = optimizationView.GetCandidateResult(first.index.Value);
462 Assert.AreEqual(first.index, fromParent.index);
463 Assert.IsNotNull(fromParent.optimization_view);
464 Assert.IsNotNull(fromParent.parameterization);
465 Assert.IsNotNull(fromParent.objectives);
466 Assert.IsNotNull(fromParent.constraints);
467 Assert.IsNotNull(fromParent.feasible);
468 });
469 }
470
471 [TestMethod, TestCategory(TypeName)]
473 {
474 POST_ThenDoAction(TestResource, optimizationView =>
475 {
476 optimizationView = WaitForOptimizationResult(optimizationView);
477 ICollectionResponse<Candidate> result = optimizationView.candidates.List(0, 1);
478 Assert.IsTrue(result.meta.total_count >= 1, "Expected at least one candidate");
479 // Get the first candidate by its index.
480 Candidate first = optimizationView.candidates.GetItem(result.items.First().id);
481 // Assert that we can get the portfolio_view for a candidate
482 PortfolioView candidate_portfolio_view = first.portfolio_view.Get();
483 Assert.IsNotNull(candidate_portfolio_view.id);
484 // Assert that we can get metrics for this portfolio_view
485 TailMetrics metrics = candidate_portfolio_view.tail_metrics_blocking(1,
487 Assert.IsNotNull(metrics);
488 Assert.AreEqual(1, metrics.context.probability);
489
490 // Assert that we can also get the candidate portfolio_view using the helper method
491 PortfolioView fromHelper = optimizationView.GetCandidatePortfolioView(0);
492 Assert.IsNotNull(fromHelper);
493 // It should be identical to the above method
495 });
496 }
497
498 [TestMethod, TestCategory(TypeName)]
500 {
501 POST_ThenDoAction(TestResource, optimizationView =>
502 {
503 optimizationView = WaitForOptimizationResult(optimizationView);
504 string result = optimizationView.csv.Get();
505 Assert.IsFalse(String.IsNullOrWhiteSpace(result), "optimization_view csv result was empty");
506 });
507 }
508
509 [TestMethod, TestCategory(TypeName)]
511 {
512 // ARE-4557: If even one layer or loss set has a different currency,
513 // the whole thing falls apart.
514 POST_WithValue(ov => ov.domains, new List<DomainEntry>
515 {
516 new DomainEntry
517 {
518 layer = Samples.Layer_CatXL.Unposted.Change(l => l.limit,
519 new MonetaryUnit(100, "EUR")).Post().ToReference(),
520 min = 0.0,
521 max = 1.0
522 }
523 }, false);
524 }
525 #endregion Results
526
527 #region Sensitivity Analysis
528 [TestMethod, TestCategory(TypeName)]
530 {
531 POST_ThenDoAction(TestResource, optimizationView =>
532 {
533 optimizationView = WaitForOptimizationResult(optimizationView);
534 SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis();
535
536 Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
537 Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
538 Assert.IsTrue(sa.sensitivities.Count > 0,
539 "Number of sensitivities for the test should be at least 1");
540 Assert.AreEqual(sa.sensitivities.Count, optimizationView.domains.Count,
541 "Number of sensitivities should be equal to the number of domains");
542 foreach (SensitivityAnalysis.LayerSensitivity s in sa.sensitivities)
543 {
544 Assert.IsNotNull(s.min, "Min value should exist");
545 Assert.IsNotNull(s.max, "Max value should exist");
546 Assert.IsNotNull(s.mean, "Mean value should exist");
547 Assert.IsNotNull(s.ref_id, "Reference ID should exist");
548 Assert.IsNotNull(s.normalized_standard_deviation, "Standard deviation should exist");
549 Assert.IsNotNull(s.normalized_interquartile_range, "Interquartile range should exist");
550 Assert.IsNotNull(s.hist, "Histogram should exist");
551 Assert.AreEqual(20, s.hist.Count, "Histogram should have 20 bins");
552 }
553 });
554 }
555
556 [TestMethod, TestCategory(TypeName)]
558 {
559 POST_ThenDoAction(TestResource, optimizationView =>
560 {
561 optimizationView = WaitForOptimizationResult(optimizationView);
562 SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis(new List<int> { 0 });
563
564 Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
565 Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
566 Assert.IsTrue(sa.sensitivities.Count > 0,
567 "Number of sensitivities for the test should be at least 1");
568 Assert.AreEqual(sa.sensitivities.Count, optimizationView.domains.Count,
569 "Number of sensitivities should be equal to the number of domains");
570 foreach (SensitivityAnalysis.LayerSensitivity s in sa.sensitivities)
571 {
572 Assert.IsNotNull(s.min, "Min value should exist");
573 Assert.IsNotNull(s.max, "Max value should exist");
574 Assert.IsNotNull(s.mean, "Mean value should exist");
575 Assert.IsNotNull(s.ref_id, "Reference ID should exist");
576 Assert.IsNotNull(s.normalized_standard_deviation, "Standard deviation should exist");
577 Assert.IsNotNull(s.normalized_interquartile_range, "Interquartile range should exist");
578 Assert.IsNotNull(s.hist, "Histogram should exist");
579 Assert.AreEqual(20, s.hist.Count, "Histogram should have 20 bins");
580 }
581 });
582 }
583
584 [TestMethod, TestCategory(TypeName)]
586 {
587 POST_ThenDoAction(TestResource, optimizationView =>
588 {
589 optimizationView = WaitForOptimizationResult(optimizationView);
590 // we do not expect to have 9999099 candidates for a view, so such a candidate should not exist
591 SensitivityAnalysis sa = optimizationView.GetSensitivityAnalysis(new List<int> { 9999099 });
592
593 Assert.IsNotNull(sa, "Sensitivity Analysis cannot be null");
594 Assert.IsNotNull(sa.sensitivities, "Analysis should contain list of sensitivities");
595 Assert.AreEqual(sa.sensitivities.Count, 0, "Number of sensitivities for this test should be 0");
596 });
597 }
598 #endregion Sensitivity Analysis
599 }
600}
Tests for a stored resource collection which do not require an instance of that resource to be define...
Exposes sample resource objects, with built-in methods for injecting dependencies.
Definition Samples.cs:14
List< IInjectableResource< ILayer > > AllSaveableLayerTypesTestList
A list of one of each type of layer that can be POSTed. Layer types that cannot be posted on their ow...
static string Valid_NonExistant_UUID
Definition Samples.cs:41
List< IInjectableResource< ILayer > > AllLayerTypesTestList
A list of one of each type of layer.
IInjectableResource< OptimizationView > OptimizationView_Simulated
override IResourceCollection< OptimizationView > collection_source
override void AdditionalValidResourceTests(OptimizationView posted)
Ensure that any optimization_views posted by this test library complete successfully.
static readonly HashSet< Type > UnsupportedOptimizationLayerTypes
The layer types which aren't supported by optimization views.
override IInjectableResource< OptimizationView > TestInjectableResource
Retrieve settings from environment variables if they exist, or the project settings file otherwise.
static int OPTIMIZATION_TIMEOUTS
Amount of time to wait for optimization engine requests to complete (in milliseconds).
Generic Unit test implementations that will test REST methods on arbitrary resources.
Class used in unit tests to mark tests as skipped by using Assert.Inconclusive() method.
Definition SkipUntil.cs:14
static void Indefinitely(string ticket=null)
Skip the specified test.
Definition SkipUntil.cs:54
Describes a collection of resources which can be listed.
API methods / requests made available to the user.
static int DefaultRequestTimeout
The default timeout used for all simple server requests, in milliseconds.
static int DefaultRequestTimeoutCollections
The default timeout used when requesting a resource collection from the server, in milliseconds.
static readonly ResourceCollection< OptimizationView > OptimizationViews
The collection of OptimizationViews on the server.
Acts as a replacement for IAPIResourceView.back_allocations. Computes the proportion of some sink str...
A "delayed payment" payment pattern models claims being paid in instalments at fixed delays after the...
Filter is like a layer whose 'terms' are to filter events out of the loss sources....
Definition Filter.cs:13
A "delayed payment" payment pattern models claims being paid in instalments at specific dates followi...
A structure that can be used to change the currency of a loss stream from one currency to another at ...
Representation of an Industry Loss Warranty, which is a layer that triggers a payout (currently expre...
The LossRank layer allows one to select a subset of occurrences in a trial year based on the relative...
Definition LossRank.cs:12
Allows one or more source layers or layer_views to be attached as loss sources to some other layer de...
Definition Nested.cs:22
The No Claims Bonus applies a payout to trials which contain no losses. (i.e. when there are no occur...
Representation of a Surplus Share contract.
Surfaces the value-allocator specialty structure (which is treated as a "layer definition" by the API...
Optional parameters which can be specified for all metrics requests.
static new MetricsOptions Default
The default metrics request options used when none are specified.
The structure returned when requesting Tail Metrics for a view.
Thrown when a request requires additional time to complete, but it exceeds the time we are willing to...
The structure returned by the results endpoint that contains candidates solutions to the optimization...
List< Candidate > candidates
The list of candidates this result consists of.
A candidate is a potential solution to the optimization problem.
Definition Candidate.cs:11
A structure indicating the min/max share constraint for a layer.
Definition DomainEntry.cs:8
Object to map metrics on the initial portfolio.
Dictionary< string, double > objectives
The set of objective function results for this candidate.
Representation of a set of Optimization Parameters.
Dictionary< string, object > custom_parameters
A key/value pair dictionary of additional parameters taken by the optimization function.
List< DomainEntry > domains
The list of layers to be optimized and their min/max constraints.
Sensitivity analysis for the optimization view.
Determines the behaviour of the API when automatically retrying a request whose result is not yet rea...
Represents the Analysis of a Portfolio.
TaskStatus
The status of a data upload which may be in progress.
Definition TaskStatus.cs:9