C# Client Library
A C# Client Library for the AnalyzeRe REST API
Loading...
Searching...
No Matches
Test_APIResourceView.cs
Go to the documentation of this file.
1using System;
2using System.Collections.Generic;
3using System.IO;
4using System.Linq;
5using System.Net;
6
7using AnalyzeRe;
16
17using RestSharp;
18#if MSTEST
19using Microsoft.VisualStudio.TestTools.UnitTesting;
20#elif NUNIT
21using NUnit.Framework;
22using TestClass = NUnit.Framework.TestFixtureAttribute;
23using TestMethod = NUnit.Framework.TestAttribute;
24using TestCategory = NUnit.Framework.CategoryAttribute;
25#endif
26
28{
// Shared helpers for testing metrics retrieval on any IAPIResourceView
// (i.e. LayerViews and PortfolioViews).
// NOTE(review): this listing is a doxygen export with some source lines elided
// (e.g. original line 47); the gaps noted below are rendering artifacts.
30 public static class TestSuite_APIResourceView
31 {
// Polling options shared by every metrics request in this suite: poll no less
// often than the environment-configured interval and give up once the
// environment's simulation timeout elapses.
34 public static readonly PollingOptions SimulationPolling = new PollingOptions(
35 maxPollInterval: EnvironmentSettings.POLLING_INTERVAL,
36 maxPollTotalTime: EnvironmentSettings.SIMULATION_TIMEOUT
37 );
38
// Asserts that tail metrics can be retrieved (blocking) for a view that has
// already been POSTed. When the simulation queue is blocked by an earlier
// test, the test is marked Inconclusive rather than Failed.
41 public static void TestMetricsRetrievable<T>(T view) where T : IAPIResourceView
42 {
43 Assert.IsNotNull(view.id, "The view must first be posted before testing its metrics.");
44 try
45 {
// NOTE(review): the continuation of this argument list (original line 47) is
// elided in this rendering; presumably it supplies a MetricsOptions and
// SimulationPolling — confirm against the full source.
46 TailMetrics result = view.tail_metrics_blocking(1,
48 Console.WriteLine($"{view.GetType().NiceTypeName()} {view.id} " +
49 $"resulting metrics:\n{result.Serialize(true)}");
50 }
51 catch (NotWaitingException ex)
52 {
// Log the URI of the request we timed out on, when available.
53 if (ex.InnerException is APIRequestException apiResponse)
54 Console.WriteLine("Timed out while waiting on metrics for the resource: " +
55 apiResponse.RestResponse.ResponseUri);
56
57 // If we're stuck in the queue but something else is blocking it, set the test
58 // to "Inconclusive" instead of "Failed". This clears out some of the noise of a
59 // failed test run so we can more easily find the original failed test.
60 if (ex.QueuePosition.HasValue && ex.QueuePosition >= 1)
61 Assert.Inconclusive("Cannot complete the test because a previous test has " +
62 $"blocked the Simulation Queue (current position: {ex.QueuePosition})")
// Not queued behind anything: rethrow so the timeout fails the test normally.
63 throw;
64 }
65 }
66 }
67
68 [TestClass]
70 where T : IAPIResourceView
71 {
72 #region Set Up and Configuration
73 private const string TypeName = "IAPIResourceView";
74 #endregion Set Up and Configuration
75
77 private IReference<ILayerView> _layerViewComponentSingleton;
78
80
82 {
83 AddCommonTestCleanupAction(() => _layerViewComponentSingleton = null);
84 }
85
87 public override void AdditionalValidResourceTests(T posted) =>
88 TestSuite_APIResourceView.TestMetricsRetrievable(posted);
89
90 #region POST
91 #region analysis_profile
92 [TestMethod, TestCategory(TypeName)]
94 {
95 POST_Attribute_Null(l => l.analysis_profile);
96 }
97 [TestMethod, TestCategory(TypeName)]
99 {
100 POST_Reference_NullId(l => l.analysis_profile);
101 }
102 [TestMethod, TestCategory(TypeName)]
104 {
105 POST_Reference_EmptyStringId(l => l.analysis_profile);
106 }
107 [TestMethod, TestCategory(TypeName)]
109 {
110 POST_Reference_NonExistantId(l => l.analysis_profile);
111 }
112 #endregion analysis_profile
113
114 #region target_currency
115 [TestMethod, TestCategory(TypeName)]
117 {
118 POST_Attribute_Null(l => l.target_currency);
119 }
120 [TestMethod, TestCategory(TypeName)]
122 {
123 POST_WithValue(l => l.target_currency, "EUR", true);
124 }
125 #endregion target_currency
126 #endregion POST
127
128 #region Metrics
129 #region Helper Methods
// Runs the given action against a posted view resource and expects it to
// complete without throwing.
// NOTE(review): the method body (original line 132) is elided from this
// rendering; presumably it obtains the posted resource and invokes action on
// it — confirm against the full source.
130 protected void Test_IAPIResourceView_Action_Succeeds(Action<T> action)
131 {
133 }
// Runs the given action against a posted view resource and expects it to
// throw a TException satisfying exceptionTest.
// NOTE(review): original line 137 — presumably the setup call that binds
// `posted` and opens the invocation closed by the trailing ')' on line 138 —
// is elided from this rendering.
134 protected void Test_IAPIResourceView_Action_Fails<TException>(Action<T> action,
135 Action<TException> exceptionTest) where TException : Exception
136 {
138 AssertApi.ExceptionThrown(() => action(posted), exceptionTest));
139 }
140
// Verifies which perspectives yield metrics for a posted view: NetLoss must
// always succeed, while LossGross is only expected to succeed when it is the
// least granular perspective available.
// NOTE(review): the first line of this method's signature (original line 141)
// is elided from this rendering.
142 Perspective leastGranularPerspective = null)
143 {
144 // TODO: Compute the least granular perspective by looking at all loss sets?
145 if (leastGranularPerspective == null)
146 leastGranularPerspective = Perspective.LossGross;
147 // After the successful post, it should always be possible to get metrics
148 // for NetLoss perspective, but not necessarily other perspectives.
149 Test_Metrics_Perspective(posted, Perspective.NetLoss, true);
150 Test_Metrics_Perspective(posted, Perspective.LossGross,
151 leastGranularPerspective.Equals(Perspective.LossGross));
152 }
153
// Requests tail metrics and exceedance probability for the given perspective
// and asserts either success or a 400 BadRequest, per shouldSucceed.
// NOTE(review): the continuation of the exceedance_probability_blocking
// argument list (original line 162) is elided from this rendering.
154 private static void Test_Metrics_Perspective(T posted, Perspective perspective,
155 bool shouldSucceed)
156 {
157 try
158 {
159 SimulationOptions setPerspective = new SimulationOptions(perspective);
160 posted.tail_metrics_blocking(1, new MetricsOptions(setPerspective), SimulationPolling);
161 posted.exceedance_probability_blocking(1,
// Reaching this point means both requests succeeded; fail if they shouldn't have.
163 if (!shouldSucceed)
164 {
165 Assert.Fail("Metrics for perspective " + perspective +
166 " succeeded, when an error was expected.");
167 }
168 }
169 catch (APIRequestException ex)
170 {
171 if (shouldSucceed)
172 {
173 Assert.Fail("Metrics for perspective " + perspective + " failed, when " +
174 "it was expected to succeed: " + ex.ServerError.message);
175 }
// An expected failure must specifically be a client-input error (400).
176 else
177 Assert.AreEqual(HttpStatusCode.BadRequest, ex.RestResponse.StatusCode);
178 }
179 }
180
/// <summary>Builds the assertion expected to hold for the
/// <see cref="ArgumentOutOfRangeException"/> thrown by client-side validation
/// of an invalid probability (or probability range).</summary>
/// <param name="min">The minimum probability that was supplied (for a single
/// probability, 0 is passed here and the value under test as max).</param>
/// <param name="max">The maximum probability that was supplied.</param>
/// <returns>An assertion that the exception message starts with the message
/// the client library produces for this min/max combination. The expected
/// message mirrors the library's validation order: min first, then max, then
/// the min/max ordering.</returns>
public static Action<ArgumentOutOfRangeException> ProbabilityError(double min, double max)
{
    string expectedMessage;
    if (min < 0 || min >= 1)
    {
        expectedMessage = "The min probability must be " +
            (min < 0 ? "greater than or equal to zero." : "less than one.") +
            " The range is [0,1)";
    }
    else if (max <= 0 || max > 1)
    {
        expectedMessage = "The max probability must be " +
            (max <= 0 ? "greater than zero." : "less than or equal to one.") +
            " The range is (0,1]";
    }
    else
    {
        // Both endpoints are individually valid, so the complaint must be
        // about their ordering. (Message text matches the client library
        // verbatim, including its existing wording.)
        expectedMessage = "The max probability must be greater or equal to than the min probability.";
    }
    return ex => Assert.IsTrue(ex.Message.StartsWith(expectedMessage),
        "Expected an ArgumentOutOfRangeException with an error message starting with:\n" +
        $"{expectedMessage}\n{ex.Message}\n^ This was the actual message. " +
        $"Full Exception Details:\n{ex}");
}
194 #endregion Helper Methods
195
196 #region TailMetrics
197 #region Helper Methods
// Asserts that a blocking tail metrics request at the given probability (with
// the given options) succeeds and returns a fully-populated result.
// NOTE(review): the opening of the wrapped call (original line 201, presumably
// `Test_IAPIResourceView_Action_Succeeds(`) is elided from this rendering.
198 private void Test_IAPIResourceView_GET_TailMetrics_Succeeds(
199 double probability, MetricsOptions options = null)
200 {
202 arg => GetTailMetricsAction(probability, options)(arg));
203 }
204
/// <summary>Asserts that requesting tail metrics at the given probability
/// (with the given options) fails with a <typeparamref name="TException"/>
/// satisfying <paramref name="exceptionTest"/>.</summary>
private void Test_IAPIResourceView_GET_TailMetrics_Fails<TException>(
    Action<TException> exceptionTest, double probability, MetricsOptions options = null)
    where TException : Exception =>
    Test_IAPIResourceView_Action_Fails(
        view => GetTailMetricsAction(probability, options)(view), exceptionTest);
212
/// <summary>Returns a delegate that fetches (blocking) tail metrics for the
/// given probability/options from a view, validates both the distribution
/// statistics and the echoed request context, and returns the parsed
/// result.</summary>
private static Func<T, TailMetrics> GetTailMetricsAction(
    double probability, MetricsOptions options = null)
{
    return view =>
    {
        TailMetrics metrics =
            view.tail_metrics_blocking(probability, options, SimulationPolling);
        // Every distribution statistic must have been populated.
        AssertTailDistributionMetricsExist(metrics);
        // The context echoed by the server must reflect the request parameters.
        RunTailMetricsContextAssertions(view, probability, options, metrics.context);
        return metrics;
    };
}
227
/// <summary>Asserts that a distribution metrics response exists and that
/// every statistic was assigned a real (non-NaN) value.</summary>
/// <remarks>Uses <see cref="Double.IsNaN"/> instead of the original
/// <c>Assert.AreNotEqual(Double.NaN, ...)</c>: the latter only works because
/// <c>Double.NaN.Equals(Double.NaN)</c> is true (unlike <c>==</c>, for which
/// NaN never equals NaN), a subtlety that is easy to break if the assertion
/// library's comparison strategy changes. IsNaN states the intent directly
/// with identical pass/fail behavior.</remarks>
private static void AssertTailDistributionMetricsExist(TailDistributionMetrics result)
{
    Assert.IsNotNull(result);
    Assert.IsFalse(Double.IsNaN(result.min), "min was not assigned a value.");
    Assert.IsFalse(Double.IsNaN(result.mean), "mean was not assigned a value.");
    Assert.IsFalse(Double.IsNaN(result.max), "max was not assigned a value.");
    Assert.IsFalse(Double.IsNaN(result.variance), "variance was not assigned a value.");
    Assert.IsFalse(Double.IsNaN(result.skewness), "skewness was not assigned a value.");
    Assert.IsFalse(Double.IsNaN(result.kurtosis), "kurtosis was not assigned a value.");
}
240
/// <summary>Verifies that the context echoed back with a TailMetrics result
/// matches the options supplied on the request, or the server defaults for
/// any option that was omitted.</summary>
private static void RunTailMetricsContextAssertions(
    T view, double probability, MetricsOptions options, TailMetrics.Context context)
{
    // Compare against a default option set when none was supplied.
    if (options == null)
        options = new MetricsOptions();

    // Checks common to tail metrics, window metrics and exceedance probability.
    AssertBasicOptionsMatchContext(view, options, context);
    AssertApi.DoublesAreEqual(probability, context.probability);
    Assert.AreEqual(options.currency ?? view.target_currency, context.currency);

    if (view is PortfolioView)
    {
        // PortfolioViews always apply participation, whatever was requested.
        Assert.IsTrue(context.apply_participation);
    }
    else
    {
        // LayerViews default apply_participation to false when not supplied.
        Assert.AreEqual(options.apply_participation ?? false, context.apply_participation);
    }
}
260
// Verifies the context fields shared by all metrics endpoints (filter,
// perspective, aggregation method, secondary uncertainty) against the
// request options or their documented server defaults.
// NOTE(review): original lines 289-290 — presumably the reporting-period
// tolerance check announced by the comment on line 288 — are elided from
// this rendering.
266 private static void AssertBasicOptionsMatchContext(T view, AggregationOptions options, Context context)
267 {
268 // If no options were supplied, compare result context to a default set of options.
269 options = options ?? new AggregationOptions();
270
271 // If filter was not set, we expect it to come back as the AnyFilter from the
272 // analysis profile associated with the view.
273 string expectedFilter = options.filter ?? TestSuite_AnalysisProfile_Base.GetDefaultFilterNameForAnalysisProfile(view.analysis_profile);
274 Assert.AreEqual(expectedFilter, context.filter);
275
276 // If perspective was not set, we expect it to come back as the current
277 // analysis profile's default perspective
278 Perspective expectedPerspective = options.perspective ?? TestSuite_Perspective.GetDefaultPerspectiveForAnalysisProfile(view.analysis_profile);
279 Assert.AreEqual(expectedPerspective, context.perspective);
280
281 // If aggregation method was not set, we expect it to come back as AEP
282 Assert.AreEqual(options.aggregation_method ?? AggregationMethod.AEP,
283 context.aggregation_method);
284
285 // If secondary uncertainty was not set, we expect it to come back as TRUE
286 Assert.AreEqual(options.secondary_uncertainty ?? true, context.secondary_uncertainty);
287
288 // Check that reporting period matches within tolerance (if set)
291 }
292 #endregion Helper Methods
293
294 #region Test Methods
295 [TestMethod, TestCategory(TypeName)]
297 {
298 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1);
299 Test_IAPIResourceView_GET_TailMetrics_Succeeds(0.01);
300 // Test a high precision value survives the round trip.
301 Test_IAPIResourceView_GET_TailMetrics_Succeeds(Math.PI / 4d);
302 // Test a very small probability maintains precision
303 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1.234556789E-12);
304 }
305
306 [TestMethod, TestCategory(TypeName)]
308 {
309 void TestBadProbability(double probability) =>
310 // Test that the appropriate client-side validation occurs
311 Test_IAPIResourceView_GET_TailMetrics_Fails(ProbabilityError(0, probability), probability);
312
313 TestBadProbability(0);
314 TestBadProbability(1.00000000000001);
315 TestBadProbability(2);
316 TestBadProbability(-0.5);
317 TestBadProbability(Double.MinValue);
318 TestBadProbability(Double.MaxValue);
319 }
320
321 [TestMethod, TestCategory(TypeName)]
323 {
325 {
326 List<IReference<LossFilter>> availableFilters =
327 Reflection.Resolve(view.analysis_profile).loss_filters;
328 foreach (IReference<LossFilter> lf in availableFilters)
329 GetTailMetricsAction(1, new MetricsOptions(filter: Reflection.Resolve(lf).name))(view);
330 });
331 }
332
333 [TestMethod, TestCategory(TypeName)]
335 {
336 Test_IAPIResourceView_GET_TailMetrics_Fails(
337 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest),
338 1, new MetricsOptions(filter: "NonExistant"));
339 }
340
341 #region Test Perspectives
// Fetches TailMetrics (probability 1) using the given perspective and asserts
// the request is allowed for this view type.
private void TestGetTailMetricsForPerspective(T view, Perspective persp)
{
    Func<T, TailMetrics> fetch =
        GetTailMetricsAction(1, new MetricsOptions(perspective: persp));
    AssertApi.MethodIsAllowed(() => fetch(view),
        $"Get TailMetrics for perspective {persp}");
}
347
348 [TestMethod, TestCategory(TypeName)]
350 {
352 TestGetTailMetricsForPerspective(view, perspective)));
353 }
354
355 [TestMethod, TestCategory(TypeName)]
356 [Obsolete("Tests an obsolete factory method for constructing legacy perspectives")]
358 {
360 TestGetTailMetricsForPerspective(view, perspective)));
361 }
362 #endregion Test Perspectives
363
364 [TestMethod, TestCategory(TypeName)]
366 {
367 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1, new MetricsOptions("GBP"));
368 }
369
370 [TestMethod, TestCategory(TypeName)]
372 {
373 Test_IAPIResourceView_GET_TailMetrics_Fails(
374 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest),
375 1, new MetricsOptions("ALL"));
376 }
377
378 [TestMethod, TestCategory(TypeName)]
380 {
381 Test_IAPIResourceView_GET_TailMetrics_Succeeds(0.95d,
382 new MetricsOptions(aggregation_method: AggregationMethod.OEP));
383 }
384
385 [TestMethod, TestCategory(TypeName)]
387 {
388 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1 / 3d,
389 new MetricsOptions(secondary_uncertainty: true));
390
391 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1 / 3d,
392 new MetricsOptions(secondary_uncertainty: false));
393 }
394
395 [TestMethod, TestCategory(TypeName)]
397 {
398 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1 / 3d, new MetricsOptions(
399 reporting_period: new ReportingPeriod(DateTime.UtcNow, DateTime.UtcNow.AddYears(1))));
400 }
401
402 [TestMethod, TestCategory(TypeName)]
404 {
405 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1 / 3d,
406 new MetricsOptions(apply_participation: true));
407
408 Test_IAPIResourceView_GET_TailMetrics_Succeeds(1 / 3d,
409 new MetricsOptions(apply_participation: false));
410 }
411
412 [TestMethod, TestCategory(TypeName)]
414 {
416 {
417 double expectedLoss = API.PollUntilReady(
418 () => view.GetSubResource<double>(@"el"), SimulationPolling);
419 TailMetrics metrics = view.tail_metrics_blocking(1,
421 AssertApi.DoublesAreEqual(expectedLoss, metrics.mean);
422 });
423 }
424
425 [TestMethod, TestCategory(TypeName)]
426 public virtual void Test_IAPIResourceView_GET_Tvar()
427 {
429 {
430 double tvar = API.PollUntilReady(
431 () => view.GetSubResource<double>(@"tvar/0.5"), SimulationPolling);
432 TailMetrics metrics = view.tail_metrics_blocking(0.5,
434 AssertApi.DoublesAreEqual(tvar, metrics.mean);
435 });
436 }
437
438 [TestMethod, TestCategory(TypeName)]
440 {
442 {
443 List<TailMetrics> result = view.tail_metrics_blocking(
444 new List<double> { 0.1, 1, 0.5, 0.25 },
446
447 // Assertions that should be met as part of any successful tail metrics result
448 Assert.AreEqual(4, result.Count, "Expected 4 TailMetrics responses");
449 // TODO: Guarantee order once ARE-3264 is complete
450 /*
451 AssertApi.DoublesAreEqual(1, result.First().context.probability);
452 AssertApi.DoublesAreEqual(0.5, result.Skip(1).First().context.probability);
453 AssertApi.DoublesAreEqual(0.25, result.Skip(2).First().context.probability);
454 AssertApi.DoublesAreEqual(0.1, result.Skip(3).First().context.probability);
455 */
456 });
457 }
458
459 [TestMethod, TestCategory(TypeName)]
461 {
462 // Check that even though the server will return a non array response,
463 // we can still handle it and put it into the expected list response.
465 {
466 List<TailMetrics> result = view.tail_metrics_blocking(
467 new List<double> { 0.1 }, MetricsOptions.Default, SimulationPolling);
468
469 Assert.AreEqual(1, result.Count, "Expected 1 TailMetrics response in a list");
470 AssertApi.DoublesAreEqual(0.1, result.First().context.probability);
471 });
472 }
473
474 [TestMethod, TestCategory(TypeName)]
476 {
477 // Test the ability to invoke all overloads and extension methods that "route"
478 // to tail_metrics, including any type conversions or inferences meant to be supported.
480 {
482 arg.tail_metrics_blocking(1, options);
483 arg.tail_metrics_blocking(0.5, options);
484 arg.tail_metrics_blocking(new[] { 1d }, options);
485 arg.tail_metrics_blocking(new List<double> { 1, 0.5, 0.25 }, options);
486 arg.tail_metrics_blocking(new HashSet<double> { 1, 0.5, 0.25 }, options);
487 arg.tail_metrics_blocking(Enumerable.Range(1, 10).Select(i => 1d / i), options);
488 arg.tail_metrics(1, options);
489 arg.tail_metrics(0.5, options);
490 arg.tail_metrics(new[] { 1d }, options);
491 arg.tail_metrics(new List<double> { 1, 0.5, 0.25 }, options);
492 arg.tail_metrics(new HashSet<double> { 1, 0.5, 0.25 }, options);
493 arg.tail_metrics(Enumerable.Range(1, 10).Select(i => 1d / i), options);
494 arg.expected_loss(options);
495 arg.var(0.5, options);
496 arg.tvar(0.5, options);
497 });
498 }
499
500 [TestMethod, TestCategory(TypeName)]
502 {
503 // ARE-7344 /tail_metrics/ can be used to get tail metrics using the API.RequestAndParse method, which
504 // then should be successfully casted to the TailMetrics object
506 {
507 Double probability = 0.25;
508 TailMetrics regular_metrics = view.tail_metrics_blocking(probability,
510 TailMetrics endpoint_metrics = API.RequestAndParse<TailMetrics>(
511 $"/{view.collection_name}/{view.id}/tail_metrics/{probability}",
512 Method.GET, MetricsOptions.Default.GetParameters());
513 Assert.AreEqual(regular_metrics.context.probability, endpoint_metrics.context.probability);
514 Assert.AreEqual(regular_metrics.context.max_probability, endpoint_metrics.context.max_probability);
515 Assert.AreEqual(regular_metrics.context.min_probability, endpoint_metrics.context.min_probability);
516 Assert.AreEqual(regular_metrics.mean, endpoint_metrics.mean);
517 Assert.AreEqual(regular_metrics.variance, endpoint_metrics.variance);
518 Assert.AreEqual(regular_metrics.skewness, endpoint_metrics.skewness);
519 Assert.AreEqual(regular_metrics.kurtosis, endpoint_metrics.kurtosis);
520 });
521 }
522 #endregion Test Methods
523 #endregion TailMetrics
524
525 #region WindowMetrics
526 #region Helper Methods
// Asserts that a blocking window metrics request over the given probability
// range (with the given options) succeeds and returns a populated result.
// NOTE(review): the opening of the wrapped call (original line 530, presumably
// `Test_IAPIResourceView_Action_Succeeds(`) is elided from this rendering.
527 private void Test_IAPIResourceView_GET_WindowMetrics_Succeeds(
528 ProbabilityWindow range, MetricsOptions options = null)
529 {
531 arg => GetWindowMetricsAction(range, options)(arg));
532 }
533
/// <summary>Asserts that requesting window metrics over the given probability
/// range (with the given options) fails with a <typeparamref name="TException"/>
/// satisfying <paramref name="exceptionTest"/>.</summary>
private void Test_IAPIResourceView_GET_WindowMetrics_Fails<TException>(
    Action<TException> exceptionTest, ProbabilityWindow range, MetricsOptions options = null)
    where TException : Exception =>
    Test_IAPIResourceView_Action_Fails(
        view => GetWindowMetricsAction(range, options)(view), exceptionTest);
541
/// <summary>Returns a delegate that fetches (blocking) window metrics for the
/// given probability range/options from a view, validates both the
/// distribution statistics and the echoed request context, and returns the
/// parsed result.</summary>
private static Func<T, WindowMetrics> GetWindowMetricsAction(
    ProbabilityWindow range, MetricsOptions options = null)
{
    return view =>
    {
        WindowMetrics metrics =
            view.window_metrics_blocking(range, options, SimulationPolling);
        // Every distribution statistic must have been populated.
        AssertTailDistributionMetricsExist(metrics);
        // The context echoed by the server must reflect the request parameters.
        RunWindowMetricsContextAssertions(view, range, options, metrics.context);
        return metrics;
    };
}
556
/// <summary>Verifies that the context echoed back with a WindowMetrics result
/// matches the options supplied on the request, or the server defaults for
/// any option that was omitted.</summary>
private static void RunWindowMetricsContextAssertions(
    T view, ProbabilityWindow range, MetricsOptions options, WindowMetrics.Context context)
{
    // Compare against a default option set when none was supplied.
    if (options == null)
        options = new MetricsOptions();

    // Checks common to tail metrics, window metrics and exceedance probability.
    AssertBasicOptionsMatchContext(view, options, context);
    AssertApi.DoublesAreEqual(range.min_probability, context.min_probability);
    AssertApi.DoublesAreEqual(range.max_probability, context.max_probability);
    Assert.AreEqual(options.currency ?? view.target_currency, context.currency);

    if (view is PortfolioView)
    {
        // PortfolioViews always apply participation, whatever was requested.
        Assert.IsTrue(context.apply_participation);
    }
    else
    {
        // LayerViews default apply_participation to false when not supplied.
        Assert.AreEqual(options.apply_participation ?? false, context.apply_participation);
    }
}
577 #endregion Helper Methods
578
579 #region Test Methods
580 [TestMethod, TestCategory(TypeName)]
582 {
583 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.All);
584 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(new ProbabilityWindow(0.01, 1));
585 // Test a high precision value survives the round trip.
586 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(new ProbabilityWindow(Math.PI / 10d, Math.PI / 4d));
587 // Test a very small probability maintains precision
588 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1.234556789E-12));
589 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(new ProbabilityWindow(0, 1.234556789E-12));
590 }
591
592 [TestMethod, TestCategory(TypeName)]
594 {
595 void TestBadProbability(double min, double max)
596 {
597 // Test that the appropriate client-side validation occurs when creating this probability range:
598 AssertApi.ExceptionThrown(() => _ = new ProbabilityWindow(min, max), ProbabilityError(min, max));
599 // Circumvent client-side validation and assert that the server returns an error:
600 Test_IAPIResourceView_Action_Fails(
601 arg => arg.GetSubResource<WindowMetrics>($"window_metrics/{min}_{max}/"),
602 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest));
603 }
604
605 TestBadProbability(0, 0);
606 TestBadProbability(1, 1);
607 TestBadProbability(0, 1.00000000000001);
608 TestBadProbability(0, 2);
609 TestBadProbability(-.1, 0);
610 TestBadProbability(0, Double.MaxValue);
611 TestBadProbability(Double.MinValue, 0);
612 TestBadProbability(0, Double.MinValue);
613 TestBadProbability(Double.MaxValue, 0);
614 TestBadProbability(.5, .4);
615 TestBadProbability(1, 0);
616 }
617
618 [TestMethod, TestCategory(TypeName)]
620 {
622 {
623 List<IReference<LossFilter>> availableFilters =
624 Reflection.Resolve(view.analysis_profile).loss_filters;
625 foreach (IReference<LossFilter> lf in availableFilters)
626 GetWindowMetricsAction(ProbabilityWindow.All,
627 new MetricsOptions(filter: Reflection.Resolve(lf).name))(view);
628 });
629 }
630
631 [TestMethod, TestCategory(TypeName)]
633 {
634 Test_IAPIResourceView_GET_WindowMetrics_Fails(
635 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest),
636 ProbabilityWindow.All, new MetricsOptions(filter: "NonExistant"));
637 }
638
639 #region Test Perspectives
// Fetches WindowMetrics over the full probability range using the given
// perspective and asserts the request is allowed for this view type.
private void TestGetWindowMetricsForPerspective(T view, Perspective persp)
{
    Func<T, WindowMetrics> fetch = GetWindowMetricsAction(
        ProbabilityWindow.All, new MetricsOptions(perspective: persp));
    AssertApi.MethodIsAllowed(() => fetch(view),
        $"Get WindowMetrics for perspective {persp}");
}
645
646 [TestMethod, TestCategory(TypeName)]
648 {
650 TestGetWindowMetricsForPerspective(view, perspective)));
651 }
652
653 [TestMethod, TestCategory(TypeName)]
654 [Obsolete("Tests an obsolete factory method for constructing legacy perspectives")]
656 {
658 TestGetWindowMetricsForPerspective(view, perspective)));
659 }
660 #endregion Test Perspectives
661
662 [TestMethod, TestCategory(TypeName)]
664 {
665 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.All, new MetricsOptions("GBP"));
666 }
667
668 [TestMethod, TestCategory(TypeName)]
670 {
671 Test_IAPIResourceView_GET_WindowMetrics_Fails(
672 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest),
674 }
675
676 [TestMethod, TestCategory(TypeName)]
678 {
679 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(0.95d),
680 new MetricsOptions(aggregation_method: AggregationMethod.OEP));
681 }
682
683 [TestMethod, TestCategory(TypeName)]
685 {
686 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1 / 3d),
687 new MetricsOptions(secondary_uncertainty: true));
688
689 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1 / 3d),
690 new MetricsOptions(secondary_uncertainty: false));
691 }
692
693 [TestMethod, TestCategory(TypeName)]
695 {
696 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1 / 3d), new MetricsOptions(
697 reporting_period: new ReportingPeriod(DateTime.UtcNow, DateTime.UtcNow.AddYears(1))));
698 }
699
700 [TestMethod, TestCategory(TypeName)]
702 {
703 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1 / 3d),
704 new MetricsOptions(apply_participation: true));
705
706 Test_IAPIResourceView_GET_WindowMetrics_Succeeds(ProbabilityWindow.Tail(1 / 3d),
707 new MetricsOptions(apply_participation: false));
708 }
709
710 [TestMethod, TestCategory(TypeName)]
712 {
714 {
715 double expectedLoss = API.PollUntilReady(
716 () => view.GetSubResource<double>(@"el"), SimulationPolling);
717 WindowMetrics metrics = view.window_metrics_blocking(ProbabilityWindow.All,
719 AssertApi.DoublesAreEqual(expectedLoss, metrics.mean);
720 });
721 }
722
723 [TestMethod, TestCategory(TypeName)]
725 {
727 {
728 double tvar = API.PollUntilReady(
729 () => view.GetSubResource<double>(@"tvar/0.5"), SimulationPolling);
730 WindowMetrics metrics = view.window_metrics_blocking(ProbabilityWindow.Tail(0.5),
732 AssertApi.DoublesAreEqual(tvar, metrics.mean);
733 });
734 }
735
736 [TestMethod, TestCategory(TypeName)]
738 {
740 {
741 List<WindowMetrics> result = view.window_metrics_blocking(
742 new List<ProbabilityWindow> {
747 },
749
750 // Assertions that should be met as part of any successful tail metrics result
751 Assert.AreEqual(4, result.Count, "Expected 4 WindowMetrics responses");
752 // TODO: Guarantee order once ARE-3264 is complete
753 });
754 }
755
756 [TestMethod, TestCategory(TypeName)]
758 {
759 // Check that even though the server will return a non array response,
760 // we can still handle it and put it into the expected list response.
762 {
763 List<WindowMetrics> result = view.window_metrics_blocking(
764 new List<ProbabilityWindow> { ProbabilityWindow.Tail(0.1) },
766
767 Assert.AreEqual(1, result.Count, "Expected 1 WindowMetrics response in a list");
768 AssertApi.DoublesAreEqual(0, result.First().context.min_probability);
769 AssertApi.DoublesAreEqual(0.1, result.First().context.max_probability);
770 });
771 }
772
773 [TestMethod, TestCategory(TypeName)]
775 {
776 // Test the ability to invoke all overloads and extension methods of window_metrics,
777 // including any type conversions or inferences meant to be supported.
779 {
781 arg.window_metrics_blocking(new[] { ProbabilityWindow.All }, options);
782 arg.window_metrics_blocking(new List<ProbabilityWindow> { ProbabilityWindow.All,
783 ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, options);
784 arg.window_metrics_blocking(new HashSet<ProbabilityWindow> { ProbabilityWindow.All,
785 ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, options);
786 arg.window_metrics_blocking(Enumerable.Range(1, 10).Select(i => ProbabilityWindow.Tail(1d / i)), options);
787 arg.window_metrics(new[] { ProbabilityWindow.All }, options);
788 arg.window_metrics(new List<ProbabilityWindow> { ProbabilityWindow.All,
789 ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, options);
790 arg.window_metrics(new HashSet<ProbabilityWindow> { ProbabilityWindow.All,
791 ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, options);
792 arg.window_metrics(Enumerable.Range(1, 10).Select(i => ProbabilityWindow.Tail(1d / i)), options);
793 });
794 }
795
796 [TestMethod, TestCategory(TypeName)]
797 [Obsolete("Tests an obsolete method for backwards compatibility.")]
799 {
800 // Test the ability to invoke all obsolete overloads and extension methods of
801 // window_metrics, including any type conversions or inferences meant to be supported.
803 {
805 arg.window_metrics_blocking(1, options);
806 arg.window_metrics_blocking(0.5, options);
807 arg.window_metrics(1, options);
808 arg.window_metrics(0.5, options);
809 });
810 }
811 #endregion Test Methods
812 #endregion WindowMetrics
813
814 #region ExceedanceProbability
815 #region Helper Methods
// Asserts that a blocking exceedance probability request for the given
// threshold (with the given options) succeeds and returns a populated result.
// NOTE(review): the first line of this method's signature (original line 816)
// and the opening of the wrapped call (original line 819, presumably
// `Test_IAPIResourceView_Action_Succeeds(`) are elided from this rendering.
817 double threshold, ExceedanceProbabilityOptions options = null)
818 {
820 arg => GetExceedanceProbabilityAction(threshold, options)(arg));
821 }
822
/// <summary>Asserts that requesting the exceedance probability for the given
/// threshold (with the given options) fails with a
/// <typeparamref name="TException"/> satisfying
/// <paramref name="exceptionTest"/>.</summary>
private void Test_IAPIResourceView_GET_ExceedanceProbability_Fails<TException>(
    Action<TException> exceptionTest, double threshold, ExceedanceProbabilityOptions options)
    where TException : Exception =>
    Test_IAPIResourceView_Action_Fails(
        view => GetExceedanceProbabilityAction(threshold, options)(view), exceptionTest);
830
/// <summary>Returns a delegate that fetches (blocking) the exceedance
/// probability of the given threshold from a view, validates the probability
/// and the echoed request context, and returns the parsed result.</summary>
private static Func<T, ExceedanceProbability> GetExceedanceProbabilityAction(
    double threshold, ExceedanceProbabilityOptions options)
{
    return view =>
    {
        ExceedanceProbability result =
            view.exceedance_probability_blocking(threshold, options, SimulationPolling);
        // The probability itself must have been populated.
        Assert.AreNotEqual(Double.NaN, result.probability, "probability was not assigned a value.");
        // The context echoed by the server must reflect the request parameters.
        RunExceedanceProbContextAssertions(view, threshold, options, result.context);
        return result;
    };
}
845
// Verifies that the context echoed back with an ExceedanceProbability result
// matches the options supplied on the request, or the server defaults for any
// option that was omitted.
// NOTE(review): the final parameter declaration of this method (original line
// 848, presumably `ExceedanceProbability.Context context)`) is elided from
// this rendering.
846 private static void RunExceedanceProbContextAssertions(
847 T view, double threshold, ExceedanceProbabilityOptions options,
849 {
850 // If no options were supplied, compare result context to a default set of options.
851 options = options ?? new ExceedanceProbabilityOptions();
852
853 // Assertions that should be met as part of any successful exceedance probabilities result
854 AssertBasicOptionsMatchContext(view, options, context);
855 AssertApi.DoublesAreEqual(threshold, context.threshold);
856 Assert.AreEqual(options.inclusive_threshold ?? false, context.inclusive_threshold);
857 Assert.AreEqual(options.threshold_currency ?? view.target_currency, context.threshold_currency);
858
859 // If this is a PortfolioView, participation is always applied regardless of
860 // the request parameter.
861 if (view is PortfolioView)
862 Assert.IsTrue(context.threshold_includes_participation);
863 // If this is a LayerView, apply_participation defaults to false if not supplied.
864 else
865 Assert.AreEqual(options.threshold_includes_participation ?? false,
866 context.threshold_includes_participation);
867 }
868 #endregion Helper Methods
869
870 #region Test Methods
872 [TestMethod, TestCategory(TypeName)]
874 {
882 // In theory, Double.Min to Double.Max are supported, but the string representation
883 // of these numbers is necessarily limited and rounds to invalid doubles,
884 // so to illustrate getting very close to these boundaries, use 99% of min and max
887 }
888
889 [TestMethod, TestCategory(TypeName)]
891 {
893 {
894 List<IReference<LossFilter>> availableFilters =
895 Reflection.Resolve(view.analysis_profile).loss_filters;
897 foreach (IReference<LossFilter> lf in availableFilters)
898 GetExceedanceProbabilityAction(0, options.Change(o => o.filter,
899 Reflection.Resolve(lf).name))(view);
900 });
901 }
902
903 [TestMethod, TestCategory(TypeName)]
905 {
906 Test_IAPIResourceView_GET_ExceedanceProbability_Fails(
907 AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest),
908 1, new ExceedanceProbabilityOptions(filter: "NonExistant"));
909 }
910
911 #region Test Perspectives
// Fetches the exceedance probability of threshold 0 using the given
// perspective and asserts the request is allowed for this view type.
private void TestGetExceedanceProbabilityForPerspective(T view, Perspective persp)
{
    Func<T, ExceedanceProbability> fetch = GetExceedanceProbabilityAction(
        0, new ExceedanceProbabilityOptions(perspective: persp));
    AssertApi.MethodIsAllowed(() => fetch(view),
        $"Get ExceedanceProbability for perspective {persp}");
}
918
919 [TestMethod, TestCategory(TypeName)]
921 {
923 TestGetExceedanceProbabilityForPerspective(view, perspective)));
924 }
925
926 [TestMethod, TestCategory(TypeName)]
927 [Obsolete("Tests an obsolete factory method for constructing legacy perspectives")]
929 {
931 TestGetExceedanceProbabilityForPerspective(view, perspective)));
932 }
933 #endregion Test Perspectives
934
// Verifies inclusive vs. exclusive threshold semantics for exceedance
// probability: at the maximum trial loss the exclusive probability must be 0,
// the inclusive probability must differ, and the default (no option supplied)
// must match the exclusive result.
// NOTE(review): this method's name line (original 936), setup opener (938)
// and the `defaultOptions` declaration (965) are elided from this rendering.
935 [TestMethod, TestCategory(TypeName)]
937 {
939 {
940 ExceedanceProbabilityOptions exclusiveOptions = ExceedanceProbabilityOptions.Default
941 .Change(o => o.inclusive_threshold, false);
942 double exclusive = GetExceedanceProbabilityAction(0, exclusiveOptions)(view).probability;
943
944 ExceedanceProbabilityOptions inclusiveOptions = ExceedanceProbabilityOptions.Default
945 .Change(o => o.inclusive_threshold, true);
946 double inclusive = GetExceedanceProbabilityAction(0, inclusiveOptions)(view).probability;
// BUG(review): the labels below are swapped — the `exclusive` value is printed
// under "Inclusive:" and the `inclusive` value under "Exclusive:". Log-output
// only (no assertion is affected), but misleading when debugging. The same
// swap recurs on original lines 959-960 below.
947 Console.WriteLine("Exceedance Probability for a threshold of 0 - " +
948 $"Inclusive: {exclusive:P2} Exclusive: {inclusive:P2}");
949
950 // Get the largest trial loss for this view. We should be able to test
951 // that results when including and excluding it are different
952 double max = view.tail_metrics_blocking(1.0, MetricsOptions.Default).max;
953 exclusive = GetExceedanceProbabilityAction(max, exclusiveOptions)(view).probability;
954 Assert.AreEqual(0, exclusive, "Expected the exclusive exceedance probability " +
955 $"of {max} to be 0%, because it is the largest loss (nothing exceeds it).");
956
957 // Verify that the inclusive probability is different.
958 inclusive = GetExceedanceProbabilityAction(max, inclusiveOptions)(view).probability;
// BUG(review): same label swap as above — Inclusive/Exclusive values reversed.
959 Console.WriteLine($"Exceedance Probability for a threshold of {max} - " +
960 $"Inclusive: {exclusive:P2} Exclusive: {inclusive:P2}");
961 Assert.AreNotEqual(exclusive, inclusive, "Expected the inclusive and " +
962 "exclusive exceedance probabilities to be different.");
963
964 // Check that by default, the value returned is exclusive.
966 double defaultV = GetExceedanceProbabilityAction(max, defaultOptions)(view).probability;
967 Assert.AreEqual(defaultV, exclusive,
968 "Expected the default result to match the result of " +
969 "explicitly excluding the threshold for the exceedance probability.");
970 });
971 }
972
973 [TestMethod, TestCategory(TypeName)]
979
        [TestMethod, TestCategory(TypeName)]
        {
            // "ALL" is not a valid threshold currency; expect 400 Bad Request.
            Test_IAPIResourceView_GET_ExceedanceProbability_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), 1,
                new ExceedanceProbabilityOptions(threshold_currency: "ALL"));
        }
987
988 [TestMethod, TestCategory(TypeName)]
994
995 [TestMethod, TestCategory(TypeName)]
1004
        [TestMethod, TestCategory(TypeName)]
        {
            // A one-year reporting period starting now should be accepted.
                reporting_period: new ReportingPeriod(DateTime.UtcNow, DateTime.UtcNow.AddYears(1))));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Both values of threshold_includes_participation should be accepted.
                new ExceedanceProbabilityOptions(threshold_includes_participation: true));

                new ExceedanceProbabilityOptions(threshold_includes_participation: false));
        }
1021
        [TestMethod, TestCategory(TypeName)]
        {
            {
                // Request several thresholds in a single batched call
                // (deliberately unsorted to exercise server-side ordering).
                List<ExceedanceProbability> result = view.exceedance_probability_blocking(
                    new List<double> { 10000, 0, 100, 1000 },

                // Assertions that should be met as part of any successful tail metrics result
                Assert.AreEqual(4, result.Count, "Expected 4 TailMetrics responses");
                // TODO: Guarantee order once ARE-3264 is complete
                /*
                AssertApi.DoublesAreEqual(0, result.First().context.threshold);
                AssertApi.DoublesAreEqual(100, result.Skip(1).First().context.threshold);
                AssertApi.DoublesAreEqual(1000, result.Skip(2).First().context.threshold);
                AssertApi.DoublesAreEqual(10000, result.Skip(3).First().context.threshold);
                */
            });
        }
1042
        [TestMethod, TestCategory(TypeName)]
        {
            // Test the ability to invoke all overloads and extension methods of window_metrics,
            // including any type conversions or inferences meant to be supported.
            {
                // Blocking variants: scalar, array, list, set, and lazy enumerable inputs.
                arg.exceedance_probability_blocking(1, options);
                arg.exceedance_probability_blocking(1.23E6, options);
                arg.exceedance_probability_blocking(new[] { 1.23E6 }, options);
                arg.exceedance_probability_blocking(new List<double> { 1, 2, 3 }, options);
                arg.exceedance_probability_blocking(new HashSet<double> { 1, 0.5, 0.25 }, options);
                arg.exceedance_probability_blocking(Enumerable.Range(1, 10).Select(i => 1d / i), options);
                // Non-blocking variants of the same overloads.
                arg.exceedance_probability(1, options);
                arg.exceedance_probability(1.23E6, options);
                arg.exceedance_probability(new[] { 1.23E6 }, options);
                arg.exceedance_probability(new List<double> { 1, 2, 3 }, options);
                arg.exceedance_probability(new HashSet<double> { 1, 0.5, 0.25 }, options);
                arg.exceedance_probability(Enumerable.Range(1, 10).Select(i => 1d / i), options);
            });
        }
1065 #endregion Test Methods
1066 #endregion ExceedanceProbability
1067
1068 #region CoMetrics
1069 #region Helper Methods
        // Lazily creates (and caches in _layerViewComponentSingleton) a posted
        // LayerView wrapping a sample CatXL layer, used as the co-metrics component.
        private IReference<ILayerView> GetLayerViewComponent()
        {
            // Posting requires a live server, so this test path is inconclusive offline.
            Assert.Inconclusive("RUN_OFFLINE = true");
            if (_layerViewComponentSingleton != null)
                return _layerViewComponentSingleton;
            CatXL component = Samples.Layer_CatXL.Unposted;
            component.description = "Component Layer";
            ILayerView<CatXL> lv = LayerViews.Create(component,
                TestResource.analysis_profile).Post();
            _layerViewComponentSingleton = lv.ToReference();
            return _layerViewComponentSingleton;
        }
1084
        // Creates a posted PortfolioView containing the shared layer view component,
        // used as a portfolio-typed co-metrics component.
        private IReference<PortfolioView> GetPortfolioViewComponent()
        {
            // Posting requires a live server, so this test path is inconclusive offline.
            Assert.Inconclusive("RUN_OFFLINE = true");
            IReference<ILayerView> layerView = GetLayerViewComponent();
            {
                layer_views = new HashSet<IReference<ILayerView>> { layerView },
                analysis_profile = TestResource.analysis_profile
            };
            return pv.Post().ToReference();
        }
1098
        // Asserts that a co-metrics request for the given window/component/options
        // succeeds, optionally running extra caller-supplied assertions on the response.
        private void Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow range,
            IReference<IAPIResourceView> component, CoMetricsOptions options = null,
            Action<CoMetrics> additionalAssertions = null)
        {
            {
                CoMetrics response = GetCoMetricsAction(range, component, options)(arg);
                additionalAssertions?.Invoke(response);
            });
        }
1109
1110 private void Test_IAPIResourceView_GET_CoMetrics_Fails<TException>(
1111 Action<TException> exceptionTest, ProbabilityWindow range,
1112 IReference<IAPIResourceView> component, CoMetricsOptions options = null)
1113 where TException : Exception
1114 {
1115 Test_IAPIResourceView_Action_Fails(
1116 arg => GetCoMetricsAction(range, component, options)(arg), exceptionTest);
1117 }
1118
1119 private static Func<T, CoMetrics> GetCoMetricsAction(ProbabilityWindow range,
1120 IReference<IAPIResourceView> component, CoMetricsOptions options = null)
1121 {
1122 return view =>
1123 {
1124 CoMetrics result = view.co_metrics_blocking(
1125 range, component, options, SimulationPolling);
1126 // Check that all co-metrics result properties are filled in.
1127 AssertConditionalDistributionMetricsExist(result.component_metrics);
1128 // Check that co-metrics context properties are filled in appropriately
1129 RunCoMetricsContextAssertions(view, range, component, options, result.context);
1130 // If the primary metrics were requested, ensure they were all filled in.
1131 if (options?.include_primary_metrics ?? false)
1132 AssertTailDistributionMetricsExist(result.primary_metrics);
1133 return result;
1134 };
1135 }
1136
1139 private static void AssertConditionalDistributionMetricsExist(ConditionalDistributionMetrics result)
1140 {
1141 Assert.IsNotNull(result);
1142 Assert.AreNotEqual(Double.NaN, result.min, "min was not assigned a value.");
1143 Assert.AreNotEqual(Double.NaN, result.mean, "mean was not assigned a value.");
1144 Assert.AreNotEqual(Double.NaN, result.covariance, "covariance was not assigned a value.");
1145 Assert.AreNotEqual(Double.NaN, result.correlation, "correlation was not assigned a value.");
1146 }
1147
        // Verifies that a co-metrics response context echoes the request parameters,
        // including the server-side defaulting rules for component filter, perspective,
        // reporting period, and participation.
        private static void RunCoMetricsContextAssertions(
            T view, ProbabilityWindow range, IReference<IAPIResourceView> component,
            CoMetricsOptions options, CoMetrics.Context context)
        {
            // First, all of the same assertions made in the TailMetrics response should hold true
            RunWindowMetricsContextAssertions(view, range, options, context);

            // If no options were supplied, compare result context to a default set of options.
            options = options ?? new CoMetricsOptions();

            // Additional assertions that should be met as part of any successful co-metrics result
            // Component references should be equivalent.
            Assert.AreEqual(component, context.component);
            // Component filter should match,
            // or default to the primary filter if no component filter was specified,
            // or default to the Analysis Profile AnyFilter if neither was specified.
            string expectedComponentFilter = options.component_filter ?? options.filter ?? TestSuite_AnalysisProfile_Base.GetDefaultFilterNameForAnalysisProfile(view.analysis_profile);
            Assert.AreEqual(expectedComponentFilter, context.component_filter);
            // Component perspective should match,
            // or default to the primary perspective if no component filter was specified,
            // or default to LossNetOfAggregateTerms if neither was specified.
            Perspective expectedComponentPerspective = options.component_perspective ??
                options.perspective ?? TestSuite_Perspective.GetDefaultPerspectiveForAnalysisProfile(view.analysis_profile);
            Assert.AreEqual(expectedComponentPerspective, context.component_perspective);

            // Check that component reporting period matches within tolerance (if set),
            // or defaults to the primary reporting period if it was set.
                options.reporting_period?.begin, context.component_reporting_period_begin);
                options.reporting_period?.end, context.component_reporting_period_end);

            // include_primary_metrics defaults to false when not supplied.
            Assert.AreEqual(options.include_primary_metrics ?? false,
                context.include_primary_metrics);

            // If this is a PortfolioView, participation is always applied regardless of
            // the request parameter.
            if (view is PortfolioView)
                Assert.IsTrue(context.apply_participation);
            // If this is a LayerView, apply_participation defaults to false if not supplied.
            else
                Assert.AreEqual(options.apply_participation ?? false, context.apply_participation);
        }
1191 #endregion Helper Methods
1192
1193 #region Test Methods
        [TestMethod, TestCategory(TypeName)]
        {
            // LayerView component: a range of valid window shapes should all succeed.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.Tail(0.01), GetLayerViewComponent());
            // Test a high precision value survives the round trip.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.Tail(Math.PI / 4d), GetLayerViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(Math.PI / 4d, 1), GetLayerViewComponent());
            // Test a very small probability maintains precision
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(0, 1.234556789E-12), GetLayerViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.Tail(1.234556789E-12), GetLayerViewComponent());
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // PortfolioView component: the same window shapes as the LayerView test.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetPortfolioViewComponent());
            // NOTE(review): this case uses the LayerView component, unlike every other
            // call in this test — possibly a copy/paste oversight; confirm intent.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(0, 0.01), GetLayerViewComponent());
            // Test a high precision value survives the round trip.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(0, Math.PI / 4d), GetPortfolioViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(Math.PI / 4d, 1), GetPortfolioViewComponent());
            // Test a very small probability maintains precision
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(new ProbabilityWindow(0, 1.234556789E-12), GetPortfolioViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.Tail(1.234556789E-12), GetPortfolioViewComponent());
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Each invalid (min, max) pair must fail both client-side and server-side.
            void TestBadProbability(double min, double max)
            {
                // Test that the appropriate client-side validation occurs when creating this probability range:
                AssertApi.ExceptionThrown(() => _ = new ProbabilityWindow(min, max), ProbabilityError(min, max));
                // Circumvent client-side validation and assert that the server returns an error:
                Test_IAPIResourceView_Action_Fails(
                    arg => arg.GetSubResource<WindowMetrics>($"window_co_metrics/{min}_{max}/" +
                        $"?component_id={arg.id}&component_type=LayerView"),
                    AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest));
            }

            // Degenerate (empty) windows:
            TestBadProbability(0, 0);
            TestBadProbability(1, 1);
            // Bounds outside the valid [0, 1] range:
            TestBadProbability(0, 1.00000000000001);
            TestBadProbability(0, 2);
            TestBadProbability(-.1, 0);
            TestBadProbability(0, Double.MaxValue);
            TestBadProbability(Double.MinValue, 0);
            TestBadProbability(0, Double.MinValue);
            TestBadProbability(Double.MaxValue, 0);
            // Inverted windows (min > max):
            TestBadProbability(.5, .4);
            TestBadProbability(1, 0);
        }
1246
        [TestMethod, TestCategory(TypeName)]
        {
            // ARE-7344 /co_metrics/ can be used to get tail metrics using the API.RequestAndParse method, which
            // then should be successfully casted to the CoMetrics object identical to the object returned by the
            // co_metrics_blocking method.
            {
                Double probability = 0.25;
                // NOTE(review): the literal 0.25 below presumably should reference the
                // 'probability' local so the two requests stay in sync — confirm.
                CoMetrics regular_metrics = view.co_metrics_blocking(ProbabilityWindow.Tail(0.25), GetLayerViewComponent(),
                // Build the raw query string the blocking helper would have produced.
                var queryParams = CoMetricsOptions.Default.GetParameters();
                queryParams.AddParameter("component_id", GetLayerViewComponent().ref_id, RestSharp.ParameterType.QueryString);
                queryParams.AddParameter("component_type", "LayerView", RestSharp.ParameterType.QueryString);
                CoMetrics endpoint_metrics = API.RequestAndParse<CoMetrics>($"/{view.collection_name}/{view.id}/co_metrics/{probability}",
                    Method.GET, queryParams);

                // The parsed endpoint response must match the blocking-call response.
                Assert.AreEqual(regular_metrics.context.probability, endpoint_metrics.context.probability);
                Assert.AreEqual(regular_metrics.context.max_probability, endpoint_metrics.context.max_probability);
                Assert.AreEqual(regular_metrics.context.min_probability, endpoint_metrics.context.min_probability);
                Assert.AreEqual(regular_metrics.component_metrics.mean, endpoint_metrics.component_metrics.mean);
                Assert.AreEqual(regular_metrics.component_metrics.covariance, endpoint_metrics.component_metrics.covariance);
                Assert.AreEqual(regular_metrics.component_metrics.correlation, endpoint_metrics.component_metrics.correlation);
            });
        }
1272
        #region Filter Tests
        [TestMethod, TestCategory(TypeName)]
        {
            // Every filter in the analysis profile should be accepted for co-metrics,
            // against both a LayerView and a PortfolioView component.
            IReference<IAPIResourceView>[] components = {
                GetLayerViewComponent(), GetPortfolioViewComponent()
            };
            {
                List<IReference<LossFilter>> availableFilters =
                    Reflection.Resolve(view.analysis_profile).loss_filters;
                foreach (IReference<IAPIResourceView> component in components)
                foreach (IReference<LossFilter> lf in availableFilters)
                {
                    string filterName = Reflection.Resolve(lf).name;
                    // Exercise both the named-argument and positional option constructors.
                    GetCoMetricsAction(ProbabilityWindow.All, component,
                        new CoMetricsOptions(filter: filterName))(view);
                    GetCoMetricsAction(ProbabilityWindow.All, component,
                        new CoMetricsOptions(filterName))(view);
                }
            });
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // An unknown filter name (named argument) should be rejected with 400.
            Test_IAPIResourceView_GET_CoMetrics_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(filter: "NonExistant"));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // An unknown filter name (positional argument) should also be rejected.
            Test_IAPIResourceView_GET_CoMetrics_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions("NonExistant"));
        }
        // TODO: Test where filter and component_filter are different
        #endregion Filter Tests
1313
1314 #region Perspective Tests
        // Exercises the primary and component perspective options for co-metrics
        // requests against the given component.
        private void TestGetCoMetricsForPerspective(T view, IReference<IAPIResourceView> component, Perspective persp)
        {
            // Test Setting just the primary perspective to this
            GetCoMetricsAction(ProbabilityWindow.All, component,
                new CoMetricsOptions(perspective: persp))(view);
            // Test Setting just the component perspective to this
            GetCoMetricsAction(ProbabilityWindow.All, component,
                new CoMetricsOptions(component_perspective: persp))(view);
            // Test setting every possible component perspective while the primary is set to this
            // TODO: This next test is over-kill, causing co_metrics tests to make O(n^2) requests
            // and run a lot longer (30s to 1min) each, so testing every single
            // possible combination of primary and component perspective has been disabled.
            //foreach (Perspective componentPerspective in TestSuite_Perspective.TestPerspectives)
            //    GetCoMetricsAction(ProbabilityWindow.All, component, new CoMetricsOptions(
            //        perspective: persp, component_perspective: componentPerspective))(view);
        }
1333
        [TestMethod, TestCategory(TypeName)]
        {
            // Every standard test perspective should be accepted against both
            // component types.
            List<IReference<IAPIResourceView>> testComponents =
                new List<IReference<IAPIResourceView>> { GetLayerViewComponent(), GetPortfolioViewComponent() };
                testComponents.ForEach(component => TestSuite_Perspective.TestPerspectives.ToList().ForEach(perspective =>
                    TestGetCoMetricsForPerspective(view, component, perspective))));
        }

        [TestMethod, TestCategory(TypeName)]
        [Obsolete("Tests an obsolete factory method for constructing legacy perspectives")]
        {
            // Legacy (obsolete factory) perspectives should also still be accepted.
            List<IReference<IAPIResourceView>> testComponents =
                new List<IReference<IAPIResourceView>> { GetLayerViewComponent(), GetPortfolioViewComponent() };
                testComponents.ForEach(component => TestSuite_Perspective.LegacyTestPerspectives.ToList().ForEach(perspective =>
                    TestGetCoMetricsForPerspective(view, component, perspective))));
        }
1354 #endregion Perspective Tests
1355
        [TestMethod, TestCategory(TypeName)]
        {
            // A valid currency override should be accepted.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(currency: "GBP"));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // "ALL" is not a valid currency; expect 400 Bad Request.
            Test_IAPIResourceView_GET_CoMetrics_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(currency: "ALL"));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Requesting primary metrics should populate primary_metrics in the response.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(include_primary_metrics: true),
                cometrics => Assert.IsNotNull(cometrics.primary_metrics));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Explicitly declining primary metrics should leave primary_metrics null.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(include_primary_metrics: false),
                cometrics => Assert.IsNull(cometrics.primary_metrics));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Every supported aggregation method should be accepted.
            foreach (AggregationMethod aggMethod in Enum<AggregationMethod>.Values)
            {
                Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                    GetLayerViewComponent(), new CoMetricsOptions(aggregation_method: aggMethod));
            }
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Both values of secondary_uncertainty should be accepted.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(secondary_uncertainty: true));

            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(secondary_uncertainty: false));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // A primary reporting period alone should be accepted.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(reporting_period:
                    new ReportingPeriod(DateTime.UtcNow, DateTime.UtcNow.AddYears(1))));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // A component reporting period alone should be accepted.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(component_reporting_period:
                    new ReportingPeriod(DateTime.UtcNow, DateTime.UtcNow.AddYears(1))));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Both values of apply_participation should be accepted.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(apply_participation: true));

            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All,
                GetLayerViewComponent(), new CoMetricsOptions(apply_participation: false));
        }

        [TestMethod, TestCategory(TypeName)]
        {
            DateTime now = DateTime.UtcNow;
            // Server should maintain both periods separately, regardless of how they intersect.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(
                    reporting_period: new ReportingPeriod(now.AddYears(-1), now.AddYears(1)),
                    component_reporting_period: new ReportingPeriod(now, now.AddMonths(3))),
                result =>
                {
                    AssertApi.DatesAreEqual(now.AddYears(-1), result.context.reporting_period_begin);
                    AssertApi.DatesAreEqual(now.AddYears(1), result.context.reporting_period_end);
                    AssertApi.DatesAreEqual(now, result.context.component_reporting_period_begin);
                    AssertApi.DatesAreEqual(now.AddMonths(3), result.context.component_reporting_period_end);
                });
        }
1450
        [TestMethod, TestCategory(TypeName)]
        {
            DateTime now = DateTime.UtcNow;
            // Server should default reporting period begin and end separately if only one is set.
            // Case 1: primary has only a begin; component has only an end — component
            // begin should default to the primary begin, primary end stays unset.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(
                    reporting_period: new ReportingPeriod(now, null),
                    component_reporting_period: new ReportingPeriod(null, now.AddMonths(3))),
                result =>
                {
                    AssertApi.DatesAreEqual(now, result.context.reporting_period_begin);
                    AssertApi.DatesAreEqual(now, result.context.component_reporting_period_begin);
                    Assert.IsNull(result.context.reporting_period_end);
                    AssertApi.DatesAreEqual(now.AddMonths(3), result.context.component_reporting_period_end);
                });
            // Case 2: component has only a begin — its end should default to the
            // primary end.
            Test_IAPIResourceView_GET_CoMetrics_Succeeds(ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(
                    reporting_period: new ReportingPeriod(now.AddYears(-1), now.AddYears(1)),
                    component_reporting_period: new ReportingPeriod(now, null)),
                result =>
                {
                    AssertApi.DatesAreEqual(now.AddYears(-1), result.context.reporting_period_begin);
                    AssertApi.DatesAreEqual(now.AddYears(1), result.context.reporting_period_end);
                    AssertApi.DatesAreEqual(now, result.context.component_reporting_period_begin);
                    AssertApi.DatesAreEqual(now.AddYears(1), result.context.component_reporting_period_end);
                });
        }

        [TestMethod, TestCategory(TypeName)]
        {
            DateTime now = DateTime.UtcNow;
            // If the user just sets part of the component reporting period, and the other
            // part defaults such that the date range is invalid, the server should raise an error.
            Test_IAPIResourceView_GET_CoMetrics_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(
                    reporting_period: new ReportingPeriod(now, null),
                    // Normally a valid reporting period, but the component reporting period begin
                    // will default to the primary begin, resulting in an invalid reporting period
                    component_reporting_period: new ReportingPeriod(null, now.AddDays(-1))));
            // Component period entirely after the primary period is likewise invalid.
            Test_IAPIResourceView_GET_CoMetrics_Fails(
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest), ProbabilityWindow.All, GetLayerViewComponent(),
                new CoMetricsOptions(
                    reporting_period: new ReportingPeriod(now, now.AddYears(1)),
                    component_reporting_period: new ReportingPeriod(now.AddYears(2), null)));
        }
1499
        [TestMethod, TestCategory(TypeName)]
        {
            {
                // Request several windows in a single batched call.
                List<CoMetrics> result = view.co_metrics_blocking(
                    new List<ProbabilityWindow> {
                    },
                    GetLayerViewComponent(), CoMetricsOptions.Default, SimulationPolling);

                // Assertions that should be met as part of any successful tail metrics result
                Assert.AreEqual(4, result.Count, "Expected 4 TailMetrics responses");
                // TODO: Guarantee order once ARE-3264 is complete
                /*
                AssertApi.DoublesAreEqual(1, result.First().context.probability);
                AssertApi.DoublesAreEqual(0.5, result.Skip(1).First().context.probability);
                AssertApi.DoublesAreEqual(0.25, result.Skip(2).First().context.probability);
                AssertApi.DoublesAreEqual(0.1, result.Skip(3).First().context.probability);
                */
            });
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Test the ability to invoke all overloads and extension methods of window_metrics,
            // including any type conversions or inferences meant to be supported.
            {
                // Blocking variants: array, list, set, and lazy enumerable window inputs.
                arg.co_metrics_blocking(new[] { ProbabilityWindow.All }, arg.ToReference(), options);
                arg.co_metrics_blocking(new List<ProbabilityWindow> { ProbabilityWindow.All,
                    ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, arg.ToReference(), options);
                arg.co_metrics_blocking(new HashSet<ProbabilityWindow> { ProbabilityWindow.All,
                    ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, arg.ToReference(), options);
                arg.co_metrics_blocking(Enumerable.Range(1, 10).Select(i => ProbabilityWindow.Tail(1d / i)),
                    arg.ToReference(), options);
                // Non-blocking variants of the same overloads.
                arg.co_metrics(new[] { ProbabilityWindow.All }, arg.ToReference(), options);
                arg.co_metrics(new List<ProbabilityWindow> { ProbabilityWindow.All,
                    ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, arg.ToReference(), options);
                arg.co_metrics(new HashSet<ProbabilityWindow> { ProbabilityWindow.All,
                    ProbabilityWindow.Tail(0.5), new ProbabilityWindow(0.25, 0.75) }, arg.ToReference(), options);
                arg.co_metrics(Enumerable.Range(1, 10).Select(i => ProbabilityWindow.Tail(1d / i)),
                    arg.ToReference(), options);
            });
        }
1550 #endregion Test Methods
1551
1552 #region Legacy Test Methods
1553 #region Helper Methods
        // Asserts that a legacy (single-probability) co-metrics request succeeds,
        // optionally running extra caller-supplied assertions on the response.
        [Obsolete("Tests an obsolete method for backwards compatibility.")]
        private void Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(double probability,
            IReference<IAPIResourceView> component, CoMetricsOptions options = null,
            Action<CoMetrics> additionalAssertions = null)
        {
            {
                CoMetrics response = GetCoMetricsLegacyAction(probability, component, options)(arg);
                additionalAssertions?.Invoke(response);
            });
        }
1565
1566 [Obsolete("Tests an obsolete method for backwards compatibility.")]
1567 private static Func<T, CoMetrics> GetCoMetricsLegacyAction(double probability,
1568 IReference<IAPIResourceView> component, CoMetricsOptions options = null)
1569 {
1570 return view =>
1571 {
1572 CoMetrics result = view.co_metrics_blocking(
1573 probability, component, options, SimulationPolling);
1574 // Check that all co-metrics result properties are filled in.
1575 AssertConditionalDistributionMetricsExist(result.component_metrics);
1576 // Check that co-metrics context properties are filled in appropriately
1577 RunCoMetricsContextAssertions(view, probability, component, options, result.context);
1578 // If the primary metrics were requested, ensure they were all filled in.
1579 if (options?.include_primary_metrics ?? false)
1580 AssertTailDistributionMetricsExist(result.primary_metrics);
1581 return result;
1582 };
1583 }
1584
1585 [Obsolete("Tests an obsolete method for backwards compatibility.")]
1586 private static void RunCoMetricsContextAssertions(
1587 T view, double probability, IReference<IAPIResourceView> component,
1588 CoMetricsOptions options, CoMetrics.Context context)
1589 {
1590 // First, all of the same assertions made in the TailMetrics response should hold true
1591 RunTailMetricsContextAssertions(view, probability, options, context);
1592
1593 RunCoMetricsContextAssertions(view, ProbabilityWindow.Tail(probability),
1594 component, options, context);
1595 }
1596 #endregion Helper Methods
1597
        [TestMethod, TestCategory(TypeName)]
        [Obsolete("Tests an obsolete method for backwards compatibility.")]
        {
            // LayerView component: representative legacy probabilities should succeed.
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(1, GetLayerViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(0.01, GetLayerViewComponent());
            // Test a high precision value survives the round trip.
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(Math.PI / 4d, GetLayerViewComponent());
            // Test a very small probability maintains precision
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(1.234556789E-12, GetLayerViewComponent());
        }

        [TestMethod, TestCategory(TypeName)]
        [Obsolete("Tests an obsolete method for backwards compatibility.")]
        {
            // PortfolioView component: the same probabilities as the LayerView test.
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(1, GetPortfolioViewComponent());
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(0.01, GetPortfolioViewComponent());
            // Test a high precision value survives the round trip.
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(Math.PI / 4d, GetPortfolioViewComponent());
            // Test a very small probability maintains precision
            Test_IAPIResourceView_GET_CoMetrics_Legacy_Succeeds(1.234556789E-12, GetPortfolioViewComponent());
        }

        [TestMethod, TestCategory(TypeName)]
        [Obsolete("Tests an obsolete method for backwards compatibility.")]
        {
            IReference<ILayerView> component = GetLayerViewComponent();
            void TestBadProbability(double probability) =>
                // Test that the appropriate client-side validation occurs
                Test_IAPIResourceView_Action_Fails(
                    arg => GetCoMetricsLegacyAction(probability, component, CoMetricsOptions.Default)(arg),
                    ProbabilityError(0, probability));

            // Probabilities outside the valid (0, 1] range must be rejected client-side.
            TestBadProbability(0);
            TestBadProbability(1.00000000000001);
            TestBadProbability(2);
            TestBadProbability(-0.5);
            TestBadProbability(Double.MinValue);
            TestBadProbability(Double.MaxValue);
        }

        [TestMethod, TestCategory(TypeName)]
        [Obsolete("Tests an obsolete method for backwards compatibility.")]
        {
            {
                // Batched legacy request with several (unsorted) probabilities.
                List<CoMetrics> result = view.co_metrics_blocking(
                    new List<double> { 0.1, 1, 0.5, 0.25 },
                    GetLayerViewComponent(), CoMetricsOptions.Default, SimulationPolling);

                // Assertions that should be met as part of any successful tail metrics result
                Assert.AreEqual(4, result.Count, "Expected 4 TailMetrics responses");
            });
        }
1655 #endregion Legacy Test Methods
1656 #endregion CoMetrics
1657 #endregion Metrics
1658
1659 #region File Download Endpoints
1660 #region Legacy Back-Allocation
        // Shared assertions for a successful back-allocation response: the result must
        // reference a back-allocated layer view that can be resolved from the server.
        [Obsolete("Tests the legacy back_allocations endpoint")]
        {
            Assert.IsNotNull(result.layer_view);
            Assert.IsNotNull(result.layer_view.ref_id);
            // Verify that the reference can be resolved
            ILayerView back_allocated_layer = result.layer_view.GetValue();
            Assert.AreEqual(result.layer_view.ref_id, back_allocated_layer.id);
        }

        [TestMethod, TestCategory(TypeName), Obsolete("Tests the legacy back_allocations endpoint")]
        {
            // Simplest case: Test back-allocating a view to itself.
            {
                BackAllocations result = view.back_allocations_blocking(view.id, SimulationPolling);
            });
        }

        [TestMethod, TestCategory(TypeName), Obsolete("Tests the legacy back_allocations endpoint")]
        {
            // Back-allocating against an unknown source id should be rejected with 400.
            Test_IAPIResourceView_Action_Fails(view =>
                _ = view.back_allocations_blocking(Samples.Valid_NonExistant_UUID, SimulationPolling),
                AssertApi.ApiExceptionTest(HttpStatusCode.BadRequest));
        }
1689 #endregion Legacy Back-Allocation
1690
1691 #region YLT
        [TestMethod, TestCategory(TypeName)]
        // Verifies that the view's YLT download endpoint returns a non-null payload.
        public virtual void Test_IAPIResourceView_GET_YLT()
        {
            {
                string result = view.ylt.Get(MetricsOptions.Default, true);
                Assert.IsNotNull(result);
                // TODO: Test that the YLT is in an expected format
            });
        }
1702
1703 // TODO: Test YLT GET Parameters
1704 #endregion YLT
1705
1706 #region YELT
        [TestMethod, TestCategory(TypeName)]
        {
            {
                // Stream the full YELT and verify the download completes successfully.
                IRestResponse result = view.yelt.GetStream(stream =>
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        string fullYELT = reader.ReadToEnd();
                        Assert.IsNotNull(fullYELT);
                        Console.WriteLine(fullYELT);
                        // TODO: Test that the YELT is in an expected format
                    }
                }, YELTOptions.Default, true);
                Assert.AreEqual(ResponseStatus.Completed, result.ResponseStatus);
                Assert.AreEqual(HttpStatusCode.OK, result.StatusCode);
            });
        }

        [TestMethod, TestCategory(TypeName)]
        {
            {
                // Restrict the download to trials 2-3 and verify every data row's
                // first (trial) column falls inside that range.
                IRestResponse result = view.yelt.GetStream(stream =>
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        reader.ReadLine(); // Skip header
                        for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
                        {
                            string trial = line.Split(',').ElementAt(0);
                            Assert.IsTrue(trial == "2" || trial == "3");
                        }
                    }
                }, new YELTOptions(start_trial: 2, end_trial: 3), true);
                Assert.AreEqual(ResponseStatus.Completed, result.ResponseStatus);
                Assert.AreEqual(HttpStatusCode.OK, result.StatusCode);
            });
        }

        [TestMethod, TestCategory(TypeName)]
        {
            // Request two optional columns and verify both the header and the
            // per-row field count reflect them.
            YELTOptions.AdditionalColumns[] additionalColumns = new[]{
                YELTOptions.AdditionalColumns.RecordType,
                YELTOptions.AdditionalColumns.ReinstatementBrokerage};

            {
                IRestResponse result = view.yelt.GetStream(stream =>
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        string headerLine = reader.ReadLine();
                        string[] headers = headerLine.Split(',');
                        // Each requested additional column must appear in the header.
                        Assert.IsTrue(additionalColumns.All(c => headers.Any(h => h.Contains(c.ToString("G")))));

                        for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
                        {
                            // 4 base columns plus one per requested additional column.
                            int field_count = line.Count(c => c == ',') + 1;
                            Assert.AreEqual(4 + additionalColumns.Length, field_count);
                        }
                    }
                }, new YELTOptions(additional_columns: additionalColumns.Aggregate((l, r) => l | r)), true);
                Assert.AreEqual(ResponseStatus.Completed, result.ResponseStatus);
                Assert.AreEqual(HttpStatusCode.OK, result.StatusCode);
            });
        }
1777
1778 // TODO: Test YELT POST Parameters
1779 #endregion YELT
1780 #endregion File Download Endpoints
1781 }
1782}
virtual T TestResource
The resource used for all unit tests that require a valid prepared but unPOSTed resource.
virtual void POST_ThenDoAction(T validToPost, Action< T > toExecute)
Post a valid resource under the assumption that it will succeed, then perform an action on the result...
void AddCommonTestCleanupAction(Action action)
Exposes sample resource objects, with built-in methods for injecting dependencies.
Definition Samples.cs:14
static string Valid_NonExistant_UUID
Definition Samples.cs:41
IInjectableResource< CatXL > Layer_CatXL
static IEnumerable< Perspective > LegacyTestPerspectives
Create a list of distinct perspectives to test.
static IEnumerable< Perspective > TestPerspectives
Create a list of distinct perspectives to test.
static Perspective GetDefaultPerspectiveForAnalysisProfile(IReference< AnalysisProfile > analysisProfile)
Get the AnalysisProfile.default_perspective of the analysis profile.
Contains static helper methods for testing IAPIResourceView instances.
virtual void Test_IAPIResourceView_GET_ExceedanceProbability_Perspective_LossNetOfAggregateTermsAnd()
void Test_IAPIResourceView_Action_Fails< TException >(Action< T > action, Action< TException > exceptionTest)
static void Test_IAPIResourceView_Metrics_MixedLossTypes(T posted, Perspective leastGranularPerspective=null)
virtual void Test_IAPIResourceView_GET_ExceedanceProbability_Threshold_ValidValues()
Test numerous valid threshold values and ensure they succeed.
override void AdditionalValidResourceTests(T posted)
Deriving classes can optionally override this function to perform additional validation on every succ...
static readonly PollingOptions SimulationPolling
Settings used to ensure simulation requests are retried frequently and timeout after a reasonable amo...
static void TestMetricsRetrievable< T >(T view)
Asserts that the posted view's metrics can be retrieved without error.
static Action< ArgumentOutOfRangeException > ProbabilityError(double min, double max)
Test for a client-side ArgumentOutOfRangeException linked to a bad probability.
void Test_IAPIResourceView_GET_ExceedanceProbability_Succeeds(double threshold, ExceedanceProbabilityOptions options=null)
static string GetDefaultFilterNameForAnalysisProfile(IReference< AnalysisProfile > analysisProfile)
Get the name of the default filter of the analysis profile. This should be the first filter whose typ...
static void MethodIsAllowed(Action request, string methodName, bool methodAllowed=true)
Wrap a request in a tryGet with some formatting for testing purposes.
Definition AssertApi.cs:98
static void DatesAreEqual(DateTime? dt1, DateTime? dt2, string propertyName="DateTime")
Asserts that two dates are equal within the supported precision. Analyze Re only supports a precision...
Definition AssertApi.cs:635
static Action< APIRequestException > ApiExceptionTest(HttpStatusCode expectedStatusCode)
Generate a function that will test a REST request exception in a standard way.
Definition AssertApi.cs:539
static void DoublesAreEqual(double expected, double actual, Func< string > message, double? relative_tolerance=null)
Determines if two doubles are equivalent within the accepted tolerance.
Definition AssertApi.cs:594
Retrieve settings from environment variables if they exist, or the project settings file otherwise.
static bool RUN_OFFLINE
Controls whether tests that normally require a connection to the server should be allowed to try to r...
A collection of filthy hacks to populate some fields of APIResources objects of any type.
Definition Reflection.cs:41
A custom exception class that includes the RestSharp.IRestResponse that generated the exception,...
IRestResponse RestResponse
The IRestResponse that generated this exception.
ServerError ServerError
The ServerError object parsed from the response body, if available.
API methods / requests made available to the user.
static object RequestAndParse(Type deserializeType, string resource, Method method, IEnumerable< Parameter > requestParameters=null, int? timeout=null)
Perform a REST request on the server and serializes the response to the desired run-time type.
LayerView factory class.
Definition LayerViews.cs:9
Representation of a Catastrophe Excess of Loss (CatXL) layer.
Definition CatXL.cs:9
Optional parameters which can be specified for all aggregated simulation result requests.
AggregationMethod? aggregation_method
The aggregation method (AEP or OEP) used to compute this distribution. The default is AEP (Aggregate ...
The structure returned when requesting back-allocated metrics for a view.
IReference< ILayerView > layer_view
A layer view wrapping the back-allocated YELT (i.e. from which back-allocated YELT metrics can be ret...
Optional parameters which can be specified for co-metrics requests.
override RequestParameters GetParameters()
Get the REST request parameters corresponding to this configuration.
bool? include_primary_metrics
By default, only the component's metrics are computed and returned. The primary distribution is only ...
ReportingPeriod component_reporting_period
A reporting period which dictates optional begin and end dates to constrain what event losses are inc...
static new CoMetricsOptions Default
The default co-metrics request options used when none are specified.
The context of a co-metrics request, indicating what parameters were used in the simulation that prod...
new double probability
Obsolete: The CoMetrics object used to represent only the tail-co-metrics, but now more flexibly supp...
The structure returned when requesting Co-Metrics for a view.
Context context
The context of a co-metrics request, indicating what parameters were used in the simulation that prod...
Definition CoMetrics.cs:13
ConditionalDistributionMetrics component_metrics
The co-metrics computed for the requested component.
Definition CoMetrics.cs:21
TailDistributionMetrics primary_metrics
The tail metrics of the primary distribution (if requested).
Definition CoMetrics.cs:17
The metrics computed from a distribution filtered by some conditions. For example,...
double covariance
The covariance is a measure of the joint variability between the two distributions used to produce th...
double mean
The contribution to the mean value of the primary distribution.
double correlation
The correlation between the two distributions.
double min
The contribution to the minimum value in the primary distribution.
The common components of the context returned from any metrics request, indicating what parameters we...
Definition Context.cs:10
DateTime? reporting_period_begin
The reporting period starting DateTime (if it was set), which restricts results to only include losse...
Definition Context.cs:37
bool secondary_uncertainty
Whether or not to simulate using secondary uncertainty (if available). This is set to false only if s...
Definition Context.cs:32
Perspective perspective
The loss perspective determining which outputs are included in the loss distribution.
Definition Context.cs:14
APIResourceView.AggregationMethod aggregation_method
The aggregation method (AEP or OEP) used to compute this distribution. The default is AEP (Aggregate ...
Definition Context.cs:27
DateTime? reporting_period_end
The reporting period ending DateTime (if it was set), which restricts results to only include losses ...
Definition Context.cs:42
string filter
The name of the predefined filter used to determine which event losses are included in the resulting ...
Definition Context.cs:19
Optional parameters which can be specified for exceedance probability requests.
static new ExceedanceProbabilityOptions Default
The default exceedance probability request options used when none are specified.
string threshold_currency
The currency of the threshold parameter. If not specified, the threshold is assumed to be in the APIT...
bool? threshold_includes_participation
Whether or not participation has been included in threshold values.
bool? inclusive_threshold
Whether the threshold value is inclusive or not. If false (which is the default when unset),...
The context of a tail metrics request, indicating what parameters were used in the simulation that pr...
The structure returned when requesting Exceedance Probability for a view.
Context context
The context of a tail metrics request, indicating what parameters were used in the simulation that pr...
Optional parameters which can be specified for all metrics requests.
string currency
The currency to convert metrics results to. If not specified, the metrics will be returned in APIType...
bool? apply_participation
Whether or not participation should be applied to results.
override RequestParameters GetParameters()
Get the REST request parameters corresponding to this configuration.
static new MetricsOptions Default
The default metrics request options used when none are specified.
A reporting period which dictates optional begin and end dates to constrain what event losses are inc...
DateTime? end
(Optional) The exclusive end date of the reporting period. If specified, event losses occurring on or...
DateTime? begin
(Optional) The inclusive start date of the reporting period. If specified, event losses occurring bef...
Optional parameters which can be specified for all simulation result requests. Includes a variety of ...
ReportingPeriod reporting_period
A reporting period which dictates optional begin and end dates to constrain what event losses are inc...
bool? secondary_uncertainty
Whether or not to simulate using secondary uncertainty (if available). Set this to false to explicitl...
The structure returned when requesting Tail Metrics for a view.
double mean
The mean of the distribution.
double max
The maximum value in the distribution.
double min
The minimum value in the distribution.
double variance
The variance of the distribution.
The context of a tail metrics request, indicating what parameters were used in the simulation that pr...
double probability
The probability determining where the tail distribution begins. Note that this is equal to the Window...
The structure returned when requesting Tail Metrics for a view.
Context context
The context of a tail metrics request, indicating what parameters were used in the simulation that pr...
The context of a tail metrics request, indicating what parameters were used in the simulation that pr...
double max_probability
The probability determining where the window distribution ends.
double min_probability
The probability determining where the window distribution begins.
The structure returned when requesting Window Metrics for a view, containing the core window distribu...
Context context
The context of a window metrics request, indicating what parameters were used in the simulation that ...
Optional parameters which can be specified for yelt download requests.
Definition YELTOptions.cs:9
AdditionalColumns
Extra columns that may be selected for inclusion in the YELT file.
static new YELTOptions Default
The default yelt request options used when none are specified.
Thrown when a request requires additional time to complete, but it exceeds the time we are willing to...
int? QueuePosition
The server-reported queue position, indicating how many requests are ahead of this one before it can ...
The loss perspective determines what factors to include when computing a distribution.
static readonly Perspective LossGross
Construct a distribution from the structure's gross losses.
bool Equals(Perspective other)
Determine whether the two perspectives are equivalent.
static readonly Perspective NetLoss
Construct a distribution from the structure's net losses.
Determines the behaviour of the API when automatically retrying a request whose result is not yet rea...
Represents the Analysis of a Portfolio.
A probability range used to dictate the set of ordered trial losses in a loss distribution that shoul...
double min_probability
The inclusive lower-bound of the probability window, which will correspond to the largest trial loss ...
double max_probability
The inclusive upper-bound of the probability window, which will correspond to the smallest trial loss...
static ProbabilityWindow All
Returns a window representing the full probability range [0, 1], such that all trial losses will be i...
static ProbabilityWindow Tail(double tail_probability)
Returns a window representing the tail probability range [0, tail_probability], such that all losses ...
Helper class for extending the functionality of the static Enum class with compile-time-type-specific ...
Definition Enum.cs:11
T Unposted
The unPOSTed resource definition.
PortfolioView and LayerView interface.
string id
The resource's unique identifier. It will be used in the request URL when requesting the resource fro...
Represents the Analysis of a Layer.
Base interface for all reference entities.
string ref_id
The id of the object being referred to.
Definition IReference.cs:17
T GetValue(IEnumerable< Parameter > requestParameters=null, int? timeout=null, bool updateCache=false)
Gets the resource that this reference refers to by requesting it from the server.
AggregationMethod
Specifies the method of aggregating event occurrences in a trial year for computing different types o...