direct tests

commit 3ec73e203d (parent 135d2e8cec)
@@ -6,7 +6,7 @@
   "source": [
    "# Project Evaluation\n",
    "\n",
    "This file interfaces with a Proxmox server to automatically generate VM structures and plots for testing the\n",
    "This file interfaces with a Proxmox server to automatically generate VM structures and graphs for testing the\n",
    "success criteria of my project."
   ]
  },
@@ -34,6 +34,7 @@
    "\n",
    "import runners\n",
    "from structure import StandardEnvironment, StandardTest, StandardIperfResult\n",
    "from structure.structure import DirectEnvironment, DirectTest\n",
    "\n",
    "%load_ext dotenv\n",
    "%dotenv"
@@ -116,6 +117,24 @@
    "### Direct Server to Server Testing"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "def direct_tests():\n",
    "    with DirectEnvironment(runner) as env:\n",
    "        run_and_save_test(env, DirectTest(1, variation_target=0.4 if fast_tests else 0.2))\n",
    "        run_and_save_test(env, DirectTest(2, variation_target=0.4 if fast_tests else 0.2))\n",
    "        run_and_save_test(env, DirectTest(3, variation_target=0.4 if fast_tests else 0.2))\n",
    "        run_and_save_test(env, DirectTest(4, variation_target=0.4 if fast_tests else 0.2))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
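The direct_tests() cell relies on names defined elsewhere in the notebook and not shown in this diff: runner, fast_tests and run_and_save_test. A minimal sketch of such a helper (not part of this commit), assuming env.test() returns an (inbound, outbound) pair of StandardIperfResult objects, as BaseEnvironment._test below does, and that results are pickled under the test name for later plotting, which is purely an illustrative assumption:

# Sketch, not from this commit: a minimal run_and_save_test as the
# direct_tests() cell assumes one. The pickle-per-test-name persistence
# is an assumption for illustration only.
import pickle

def run_and_save_test(env, test):
    inbound, outbound = env.test(test)          # run iperf in both directions
    with open('{}.pickle'.format(test.name()), 'wb') as f:
        pickle.dump((inbound, outbound), f)     # cache results for later plotting
    return inbound, outbound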
@@ -482,7 +501,60 @@
    "    {\n",
    "        'Varied Connection': StandardTest([2,2], events={10: (0,1), 15: (0,2)}, duration=30),\n",
    "    },\n",
    "    error_bars_y=True,\n",
    "    filename='png',\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "### Comparisons to a Direct Connection"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "plot_iperf_results(\n",
    "    {\n",
    "        '1x2MB Connection (not proxied)': DirectTest(2),\n",
    "        '2x1MB Connections (proxied)': StandardTest([1,1]),\n",
    "        '1x1MB Connection (not proxied)': DirectTest(1),\n",
    "\n",
    "    },\n",
    "    filename='png',\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "plot_iperf_results(\n",
    "    {\n",
    "        '1x4MB Connection (not proxied)': DirectTest(4),\n",
    "        '2x2MB Connections (proxied)': StandardTest([2,2]),\n",
    "        '1x2MB Connection (not proxied)': DirectTest(2),\n",
    "\n",
    "    },\n",
    "    filename='png',\n",
    ")"
   ]
@@ -364,6 +364,14 @@ class StandardTest:
        return ''.join(name_builder)


class DirectTest(StandardTest):
    def __init__(self, rate: int, **kwargs):
        super().__init__([rate], **kwargs)

    def name(self) -> str:
        return 'D{}'.format(super().name())


class StandardIperfResult:
    def __init__(self, test: StandardTest, iperf: str, interval_size=1.0):
        self.test = test
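DirectTest is what lets the notebook cells above put an unproxied link next to proxied StandardTest runs of the same total rate: it wraps a single rate in the list-based StandardTest constructor and tags the test name with a 'D' prefix. A minimal sketch of that relationship (not part of this commit), using a simplified stand-in for StandardTest, whose full definition and name() scheme are not shown in this diff:

# Sketch with a placeholder StandardTest; the real class lives in
# structure/structure.py and carries more parameters.
class StandardTest:
    def __init__(self, rates, variation_target=0.2, **kwargs):
        self.rates = rates
        self.variation_target = variation_target

    def name(self):
        return '-'.join(str(r) for r in self.rates)  # placeholder naming scheme


class DirectTest(StandardTest):
    def __init__(self, rate, **kwargs):
        super().__init__([rate], **kwargs)   # a direct test rates exactly one link

    def name(self):
        return 'D{}'.format(super().name())  # 'D' marks an unproxied, direct run


print(DirectTest(2).rates)   # [2]
print(DirectTest(2).name())  # 'D2' with the placeholder scheme above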
@@ -423,10 +431,86 @@ class StandardIperfResult:
        return dict(zip(times, ranges))


class StandardEnvironment:
def repeat_until_satisfied(reducer, satisfied, initial=None, max_attempts=100, max_failures=3):
    val = initial()
    i = 0
    for i in range(max_attempts):
        for j in range(max_failures):
            try:
                val = reducer(val)
            except Exception as e:
                print('failed with {}'.format(e))
                if j == max_failures - 1:
                    raise e

        if satisfied(val):
            return val

    raise RuntimeError('too many attempts')


class BaseEnvironment:
    def __init__(self, runner, top_level_bridge: Bridge):
        self.top_level_bridge = top_level_bridge
        self._runner = runner

    def __enter__(self):
        try:
            self._runner.build(self.top_level_bridge)
        except Exception as e:
            self._runner.teardown()
            raise e
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._runner.teardown()

    def _test(
            self,
            test: StandardTest,
            inbound_server: SpeedTestServer,
            inbound_client: SpeedTestServer,
            rated_node: Node,
            expected_interfaces: int,
    ) -> Tuple[StandardIperfResult, StandardIperfResult]:
        if len(test.rates) != expected_interfaces:
            raise RuntimeError('mismatched number of interfaces')

        results = []
        for server, client in [(inbound_server, inbound_client), (inbound_client, inbound_server)]:
            def test_reducer(old: Optional[StandardIperfResult]) -> StandardIperfResult:
                for i, r in enumerate(test.rates):
                    rated_node.get_interfaces()[i].set_rate(r)
                server.server()

                for t, (iface, rate) in test.events.items():
                    threading.Timer(
                        5 + t,
                        (lambda s: lambda: s.lp.get_interfaces()[iface].set_rate(rate))(self),
                    )

                iperf = client.client(server, time=test.duration)
                if old is None:
                    return StandardIperfResult(test, iperf)
                else:
                    old.add_results(iperf)
                    return old

            result = repeat_until_satisfied(
                test_reducer,
                lambda x: max(x.coefficient_variance().values()) < test.variation_target,
                max_failures=test.max_failures,
                max_attempts=test.max_attempts,
            )
            results.append(result)

        # Return a tuple of (inbound, outbound)
        return results[0], results[1]


class StandardEnvironment(BaseEnvironment):
    def __init__(self, interfaces: int, runner, setup_params: dict):
        self._interfaces = interfaces
        self._runner = runner

        self.rp = RemotePortal([Interface(IpMethod.Auto4)], setup_params=setup_params)
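repeat_until_satisfied factors out the retry loop that the old StandardEnvironment.test carried inline: a reducer is applied repeatedly, tolerating up to max_failures consecutive exceptions, and the accumulated value is returned once the satisfied predicate passes, otherwise 'too many attempts' is raised. A usage sketch (not part of the commit); it treats initial as a zero-argument factory, as the body's initial() call suggests, and assumes the function is importable from the same module path the notebook uses:

# Sketch, not from this commit: driving repeat_until_satisfied with a toy
# reducer that accumulates samples until enough have been collected.
import random
from structure.structure import repeat_until_satisfied  # assumed module path

samples = repeat_until_satisfied(
    reducer=lambda acc: acc + [random.random()],   # add one measurement per call
    satisfied=lambda acc: len(acc) >= 5,           # stop once five are collected
    initial=lambda: [],                            # start from an empty sample list
    max_attempts=20,
    max_failures=3,
)
print(len(samples))  # at least 5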
@@ -442,64 +526,25 @@ class StandardEnvironment:
        self.rp.set_local_portal(self.lp)
        self.lp.set_remote_portal(self.rp)

        self.top_level_bridge = Bridge(
        super().__init__(runner, Bridge(
            self.st.get_interfaces()[0],
            self.rp.get_interfaces()[0],
            *self.lp.get_interfaces()[0:interfaces],
        )

    def __enter__(self):
        try:
            self._runner.build(self.top_level_bridge)
        except Exception as e:
            self._runner.teardown()
            raise e
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._runner.teardown()
        ))

    def test(self, test: StandardTest) -> Tuple[StandardIperfResult, StandardIperfResult]:
        if len(test.rates) != self._interfaces:
            raise RuntimeError('mismatched number of interfaces')
        return self._test(test, self.st, self.cl, self.lp, self._interfaces)

        for i, r in enumerate(test.rates):
            self.lp.get_interfaces()[i].set_rate(r)

        results = []
        for server, client in [(self.cl, self.st), (self.st, self.cl)]:
            result: Optional[StandardIperfResult] = None
class DirectEnvironment(BaseEnvironment):
    def __init__(self, runner):
        self.st1 = SpeedTestServer()
        self.st2 = SpeedTestServer()

            for i in range(test.max_attempts):
                if i > 2 and max(result.coefficient_variance().values()) < test.variation_target:
                    break
        super().__init__(runner, Bridge(
            self.st1.get_interfaces()[0],
            self.st2.get_interfaces()[0],
        ))

                for j in range(test.max_failures):
                    try:
                        server.server()

                        for t, (iface, rate) in test.events.items():
                            threading.Timer(
                                5 + t,
                                (lambda s: lambda: s.lp.get_interfaces()[iface].set_rate(rate))(self),
                            )

                        iperf = client.client(server, time=test.duration)
                        if result is None:
                            result = StandardIperfResult(test, iperf)
                        else:
                            result.add_results(iperf)

                        break
                    except Exception as e:
                        print('failed with {}'.format(e))
                        if j == test.max_failures - 1:
                            raise e

                if max(result.coefficient_variance().values()) > test.variation_target:
                    raise RuntimeError('too many attempts')

            results.append(result)

        # Return a tuple of (inbound, outbound)
        return results[0], results[1]
    def test(self, test: StandardTest) -> Tuple[StandardIperfResult, StandardIperfResult]:
        return self._test(test, self.st2, self.st1, self.st2, 1)
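DirectEnvironment bridges the two SpeedTestServer interfaces with nothing in between, and reuses BaseEnvironment for build/teardown and the bidirectional _test run. A usage sketch of the new class outside the notebook (not part of the commit); runner stands for the Proxmox runner object the notebook constructs elsewhere, and error handling is omitted:

# Sketch, not from this commit: exercising the new DirectEnvironment directly.
from structure.structure import DirectEnvironment, DirectTest

def measure_direct(runner, rate_mb: int):
    with DirectEnvironment(runner) as env:                  # builds the two-server bridge, tears it down on exit
        inbound, outbound = env.test(DirectTest(rate_mb))   # iperf in both directions over a single direct link
    return inbound, outbound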