From b9305215572101f190cb08168d206571e823fc09 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 14 Jun 2017 11:35:41 +0100 Subject: [PATCH 01/87] Rename test file that deals with POST requests - and doesn't just test the underlying methods - all the tests in this file actually make POST request to an endpoint --- api/tests/{test_cloud_record.py => test_cloud_record_post.py} | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) rename api/tests/{test_cloud_record.py => test_cloud_record_post.py} (98%) diff --git a/api/tests/test_cloud_record.py b/api/tests/test_cloud_record_post.py similarity index 98% rename from api/tests/test_cloud_record.py rename to api/tests/test_cloud_record_post.py index 0c58967..0d4ecbe 100644 --- a/api/tests/test_cloud_record.py +++ b/api/tests/test_cloud_record_post.py @@ -8,15 +8,13 @@ from django.core.urlresolvers import reverse from django.test import Client, TestCase from mock import Mock -from rest_framework.request import Request -from rest_framework.test import APIRequestFactory from api.views.CloudRecordView import CloudRecordView QPATH_TEST = '/tmp/django-test/' -class CloudRecordTest(TestCase): +class CloudRecordPostTest(TestCase): """Tests POST requests to the Cloud Record endpoint.""" def setUp(self): From fa5c874154c0f59cc300125d4143a1b72a2c8318 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 14 Jun 2017 11:40:14 +0100 Subject: [PATCH 02/87] Add test of the helper method of POST requests - these tests do not make POST requests --- api/tests/test_cloud_record_helper.py | 93 +++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 api/tests/test_cloud_record_helper.py diff --git a/api/tests/test_cloud_record_helper.py b/api/tests/test_cloud_record_helper.py new file mode 100644 index 0000000..ae2b886 --- /dev/null +++ b/api/tests/test_cloud_record_helper.py @@ -0,0 +1,93 @@ +"""This module tests the helper methods of the CloudRecordView class.""" + +import logging + +from django.test import 
TestCase +from mock import Mock + +from api.views.CloudRecordView import CloudRecordView + + +class CloudRecordHelperTest(TestCase): + """Tests the helper methods of the CloudRecordView class.""" + + def setUp(self): + """Prevent logging from appearing in test output.""" + logging.disable(logging.CRITICAL) + + def test_signer_is_valid(self): + """ + Test the CloudRecordView._signer_is_valid method. + + That method determines wether a POST request should be allowed or not. + """ + # mock the external call to the CMDB to retrieve the providers + # used in the _signer_is_valid method we are testing + CloudRecordView._get_provider_list = Mock(return_value=PROVIDERS) + test_cloud_view = CloudRecordView() + + # The DN corresponds to a host listed as a provider + allowed_dn = "/C=XX/O=XX/OU=XX/L=XX/CN=allowed_host.test" + + # The Host this DN corresponds to will + # be explicitly granted POST rights + extra_dn = "/C=XX/O=XX/OU=XX/L=XX/CN=special_host.test" + + # The Host this DN corresponds to will + # have its POST rights explicitly revoked + banned_dn = "/C=XX/O=XX/OU=XX/L=XX/CN=allowed_host2.test" + + # The Host this DN corresponds is unknown to the system, + # it should not be granted POST rights + unknown_dn = "/C=XX/O=XX/OU=XX/L=XX/CN=mystery_host.test" + + # Grant/Revoke POST fights + with self.settings(ALLOWED_TO_POST=[extra_dn], + BANNED_FROM_POST=[banned_dn]): + + # DNs corresponding to hosts listed as a provider should be valid + # i.e have POST rights + self.assertTrue(test_cloud_view._signer_is_valid(allowed_dn)) + + # DNs corresponding to hosts with explicit access should be valid + # i.e have POST rights + self.assertTrue(test_cloud_view._signer_is_valid(extra_dn)) + + # DNs corresponding to banned hosts should be invalid + # i.e not have POST rights + self.assertFalse(test_cloud_view._signer_is_valid(banned_dn)) + + # DNs corresponding to unknonw hosts should be invalid + # i.e not have POST rights + 
self.assertFalse(test_cloud_view._signer_is_valid(unknown_dn)) + + # mock the external call to the CMDB to retrieve the providers + # used in the _signer_is_valid method we are testing + # now we are mocking a failure of the CMDB to respond as expected + CloudRecordView._get_provider_list = Mock(return_value={}) + # in which case we should reject all POST requests + self.assertFalse(test_cloud_view._signer_is_valid(allowed_dn)) + +PROVIDERS = {'total_rows': 735, + 'offset': 695, + 'rows': [ + {'id': '1', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST2', + 'provider_id': 'TEST2', + 'type': 'cloud'}}, + {'id': '2', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST', + 'provider_id': 'TEST', + 'hostname': 'allowed_host.test', + 'type': 'cloud'}}, + {'id': '3', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST', + 'provider_id': 'TEST', + 'hostname': 'allowed_host2.test', + 'type': 'cloud'}}]} From d800a388043024f4c2e36640ab7a2befdd4897ea Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 14 Jun 2017 13:28:44 +0100 Subject: [PATCH 03/87] Move PROVIDERS and MESSAGE into __init__.py - so both files dealing with the CloudRecordView class can access them, rather than have two class level PROVIDERS variables defined --- api/tests/__init__.py | 93 ++++++++++++++++++++++++++- api/tests/test_cloud_record_helper.py | 25 +------ api/tests/test_cloud_record_post.py | 66 +------------------ 3 files changed, 94 insertions(+), 90 deletions(-) diff --git a/api/tests/__init__.py b/api/tests/__init__.py index 498e5ae..80bc091 100644 --- a/api/tests/__init__.py +++ b/api/tests/__init__.py @@ -1 +1,92 @@ -"""This file allows test to be imported as a python package.""" +""" +Copyright (C) 2016 STFC. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +@author Greg Corbett +""" +__version__ = '1, 4, 0' + +# Example output from the provider list on the CMDB +PROVIDERS = {'total_rows': 735, + 'offset': 695, + 'rows': [ + {'id': '1', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST2', + 'provider_id': 'TEST2', + 'type': 'cloud'}}, + {'id': '2', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST', + 'provider_id': 'TEST', + 'hostname': 'allowed_host.test', + 'type': 'cloud'}}, + {'id': '3', + 'key': ['service'], + 'value':{ + 'sitename': 'TEST', + 'provider_id': 'TEST', + 'hostname': 'allowed_host2.test', + 'type': 'cloud'}}]} + +# An example APEL Cloud Message V0.2 +MESSAGE = """APEL-cloud-message: v0.2 +VMUUID: TestVM1 2013-02-25 17:37:27+00:00 +SiteName: CESGA +MachineName: one-2421 +LocalUserId: 19 +LocalGroupId: 101 +GlobalUserName: NULL +FQAN: NULL +Status: completed +StartTime: 1361813847 +EndTime: 1361813870 +SuspendDuration: NULL +WallDuration: NULL +CpuDuration: NULL +CpuCount: 1 +NetworkType: NULL +NetworkInbound: 0 +NetworkOutbound: 0 +Memory: 1000 +Disk: NULL +StorageRecordId: NULL +ImageId: NULL +CloudType: OpenNebula +%% +VMUUID: TestVM1 2015-06-25 17:37:27+00:00 +SiteName: CESGA +MachineName: one-2422 +LocalUserId: 13 +LocalGroupId: 131 +GlobalUserName: NULL +FQAN: NULL +Status: completed +StartTime: 1361413847 +EndTime: 1361811870 +SuspendDuration: NULL +WallDuration: NULL +CpuDuration: NULL +CpuCount: 1 +NetworkType: NULL +NetworkInbound: 0 +NetworkOutbound: 0 +Memory: 1000 +Disk: NULL +StorageRecordId: NULL +ImageId: NULL +CloudType: OpenNebula +%%""" diff --git 
a/api/tests/test_cloud_record_helper.py b/api/tests/test_cloud_record_helper.py index ae2b886..af1881d 100644 --- a/api/tests/test_cloud_record_helper.py +++ b/api/tests/test_cloud_record_helper.py @@ -5,6 +5,7 @@ from django.test import TestCase from mock import Mock +from api.tests import PROVIDERS from api.views.CloudRecordView import CloudRecordView @@ -67,27 +68,3 @@ def test_signer_is_valid(self): CloudRecordView._get_provider_list = Mock(return_value={}) # in which case we should reject all POST requests self.assertFalse(test_cloud_view._signer_is_valid(allowed_dn)) - -PROVIDERS = {'total_rows': 735, - 'offset': 695, - 'rows': [ - {'id': '1', - 'key': ['service'], - 'value':{ - 'sitename': 'TEST2', - 'provider_id': 'TEST2', - 'type': 'cloud'}}, - {'id': '2', - 'key': ['service'], - 'value':{ - 'sitename': 'TEST', - 'provider_id': 'TEST', - 'hostname': 'allowed_host.test', - 'type': 'cloud'}}, - {'id': '3', - 'key': ['service'], - 'value':{ - 'sitename': 'TEST', - 'provider_id': 'TEST', - 'hostname': 'allowed_host2.test', - 'type': 'cloud'}}]} diff --git a/api/tests/test_cloud_record_post.py b/api/tests/test_cloud_record_post.py index 0d4ecbe..72950cc 100644 --- a/api/tests/test_cloud_record_post.py +++ b/api/tests/test_cloud_record_post.py @@ -9,6 +9,7 @@ from django.test import Client, TestCase from mock import Mock +from api.tests import MESSAGE, PROVIDERS from api.views.CloudRecordView import CloudRecordView QPATH_TEST = '/tmp/django-test/' @@ -167,68 +168,3 @@ def _check_record_post(self, message, expected_status, # check saved message content self.assertEqual(message, message_content) - -PROVIDERS = {'total_rows': 735, - 'offset': 695, - 'rows': [ - {'id': '1', - 'key': ['service'], - 'value':{ - 'sitename': 'TEST2', - 'provider_id': 'TEST2', - 'type': 'cloud'}}, - {'id': '2', - 'key': ['service'], - 'value':{ - 'sitename': 'TEST', - 'provider_id': 'TEST', - 'hostname': 'allowed_host.test', - 'type': 'cloud'}}]} - -MESSAGE = """APEL-cloud-message: v0.2 
-VMUUID: TestVM1 2013-02-25 17:37:27+00:00 -SiteName: CESGA -MachineName: one-2421 -LocalUserId: 19 -LocalGroupId: 101 -GlobalUserName: NULL -FQAN: NULL -Status: completed -StartTime: 1361813847 -EndTime: 1361813870 -SuspendDuration: NULL -WallDuration: NULL -CpuDuration: NULL -CpuCount: 1 -NetworkType: NULL -NetworkInbound: 0 -NetworkOutbound: 0 -Memory: 1000 -Disk: NULL -StorageRecordId: NULL -ImageId: NULL -CloudType: OpenNebula -%% -VMUUID: TestVM1 2015-06-25 17:37:27+00:00 -SiteName: CESGA -MachineName: one-2422 -LocalUserId: 13 -LocalGroupId: 131 -GlobalUserName: NULL -FQAN: NULL -Status: completed -StartTime: 1361413847 -EndTime: 1361811870 -SuspendDuration: NULL -WallDuration: NULL -CpuDuration: NULL -CpuCount: 1 -NetworkType: NULL -NetworkInbound: 0 -NetworkOutbound: 0 -Memory: 1000 -Disk: NULL -StorageRecordId: NULL -ImageId: NULL -CloudType: OpenNebula -%%""" From e148338c3e40c7e1798a649f74d58b5f4f733b6c Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 24 Aug 2017 17:05:56 +0100 Subject: [PATCH 04/87] Update .travis.yml - Remove pip log before caching to avoid it affecting the cache. - Add --branch argument to coverage run as the coveralls module reports coverage to Coveralls.io now. - Test against latest nightly rather than a fixed Python 3 version so that it doesn't have to be manually changed in future. 
--- .travis.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0bbf709..dd9165a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,10 +2,10 @@ language: python python: - "2.6" - "2.7" - - "3.6" + - "nightly" matrix: allow_failures: - - python: "3.6" + - python: "nightly" fast_finish: true # use the mysql service @@ -16,6 +16,8 @@ services: sudo: false # Cache the dependencies installed by pip cache: pip +# Avoid pip log from affecting cache +before_cache: rm -fv ~/.cache/pip/log/debug.log install: - pip install -r requirements.txt @@ -29,6 +31,6 @@ before_script: - export PYTHONPATH=$PYTHONPATH:`pwd -P` # Command to run tests -script: coverage run --source='.' manage.py test +script: coverage run --branch --source='.' manage.py test after_success: coveralls From ddf4e661e0b8ba77f9eb7d6bc19d4abfdfeeba5a Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 14 Jun 2017 14:55:37 +0100 Subject: [PATCH 05/87] Split test_cloud_record_summary.py into two files - tests in api/tests/test_cloud_record_summary_get.py make get requests and looks at the response for test pass/fail - api/tests/test_cloud_record_summary_helper.py test helper methods and looks at the result of those method to determine test pass/fail (note, some of these do make get request objects, as the helper methods expect take the whole request as input) --- ...ry.py => test_cloud_record_summary_get.py} | 124 +------------- api/tests/test_cloud_record_summary_helper.py | 151 ++++++++++++++++++ 2 files changed, 152 insertions(+), 123 deletions(-) rename api/tests/{test_cloud_record_summary.py => test_cloud_record_summary_get.py} (68%) create mode 100644 api/tests/test_cloud_record_summary_helper.py diff --git a/api/tests/test_cloud_record_summary.py b/api/tests/test_cloud_record_summary_get.py similarity index 68% rename from api/tests/test_cloud_record_summary.py rename to api/tests/test_cloud_record_summary_get.py index 1100bca..827fbae 100644 --- 
a/api/tests/test_cloud_record_summary.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -7,12 +7,11 @@ from django.core.urlresolvers import reverse from django.test import Client, TestCase from mock import Mock -from rest_framework.test import APIRequestFactory QPATH_TEST = '/tmp/django-test/' -class CloudRecordSummaryTest(TestCase): +class CloudRecordSummaryGetTest(TestCase): """Tests GET requests to the Cloud Sumamry Record endpoint.""" def setUp(self): @@ -168,127 +167,6 @@ def test_cloud_record_summary_get_200(self): self._clear_database(database) database.close() - def test_parse_query_parameters(self): - """Test the parsing of query parameters.""" - # test a get with group, summary, start, end and user - test_cloud_view = CloudRecordSummaryView() - factory = APIRequestFactory() - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=Group1', - '&service=Service1', - '&from=FromDate', - '&to=ToDate', - '&user=UserA')) - - request = factory.get(url) - - parsed_responses = test_cloud_view._parse_query_parameters(request) - self.assertEqual(parsed_responses, - ("Group1", "Service1", "FromDate", - "ToDate", "UserA")) - - # test a get with just an end date - url = ''.join((reverse('CloudRecordSummaryView'), - '?to=ToDate')) - - request = factory.get(url) - parsed_responses = test_cloud_view._parse_query_parameters(request) - self.assertEqual(parsed_responses, - (None, None, None, - "ToDate", None)) - - def test_paginate_result(self): - """Test an empty result is paginated correctly.""" - # test when no page is given. 
- test_cloud_view = CloudRecordSummaryView() - content = test_cloud_view._paginate_result(None, []) - expected_content = {'count': 0, - 'previous': None, - u'results': [], - 'next': None} - - self.assertEqual(content, expected_content) - - # test when page number is incorrect/invalid - factory = APIRequestFactory() - - # test when page number is not a number - url = ''.join((reverse('CloudRecordSummaryView'), - '?page=a')) - - request = factory.get(url) - content = test_cloud_view._paginate_result(request, []) - self.assertEqual(content, expected_content) - - # test when page number is out of bounds - url = ''.join((reverse('CloudRecordSummaryView'), - '?page=9999')) - - request = factory.get(url) - content = test_cloud_view._paginate_result(request, []) - self.assertEqual(content, expected_content) - - def test_request_to_token(self): - """Test a token can be extracted from request.""" - test_cloud_view = CloudRecordSummaryView() - factory = APIRequestFactory() - url = ''.join((reverse('CloudRecordSummaryView'), - '?from=FromDate')) - - request = factory.get(url, HTTP_AUTHORIZATION='Bearer ThisIsAToken') - - token = test_cloud_view._request_to_token(request) - self.assertEqual(token, 'ThisIsAToken') - - def test_request_to_token_fail(self): - """Test the response of a tokenless request.""" - test_client = Client() - - url = ''.join((reverse('CloudRecordSummaryView'), - '?from=FromDate')) - - response = test_client.get(url) - - self.assertEqual(response.status_code, 401) - - def test_is_client_authorized(self): - """Test a example client is authorised.""" - test_cloud_view = CloudRecordSummaryView() - with self.settings(ALLOWED_FOR_GET='IAmAllowed'): - self.assertTrue( - test_cloud_view._is_client_authorized( - 'IAmAllowed')) - - def test_is_client_authorized_fail(self): - """Test the failure of un-authorised clients.""" - test_cloud_view = CloudRecordSummaryView() - with self.settings(ALLOWED_FOR_GET='IAmAllowed'): - self.assertFalse( - 
test_cloud_view._is_client_authorized( - 'IAmNotAllowed')) - - def test_filter_cursor(self): - """Test the filtering of a query object based on settings.""" - test_cloud_view = CloudRecordSummaryView() - - # A list of test summaries. - test_data = [{'Day': 30, - 'Month': 7, - 'Year': 2016, - 'SiteName': 'TEST'}] - - cursor = Mock() - # Get the mock cursor object return the test_data. - cursor.fetchall = Mock(return_value=test_data) - - with self.settings(RETURN_HEADERS=['SiteName', 'Day']): - result = test_cloud_view._filter_cursor(cursor) - - expected_result = [{'SiteName': 'TEST', - 'Day': 30}] - - self.assertEqual(result, expected_result) - def tearDown(self): """Delete any messages under QPATH and re-enable logging.INFO.""" logging.disable(logging.NOTSET) diff --git a/api/tests/test_cloud_record_summary_helper.py b/api/tests/test_cloud_record_summary_helper.py new file mode 100644 index 0000000..9780cc8 --- /dev/null +++ b/api/tests/test_cloud_record_summary_helper.py @@ -0,0 +1,151 @@ +"""This module tests the helper methods of the CloudRecordView class.""" + +import logging +import MySQLdb + +from api.views.CloudRecordSummaryView import CloudRecordSummaryView +from django.core.urlresolvers import reverse +from django.test import Client, TestCase +from mock import Mock +from rest_framework.test import APIRequestFactory + +QPATH_TEST = '/tmp/django-test/' + + +class CloudRecordSummaryHelperTest(TestCase): + """ + Tests the helper methods of the CloudRecordSummaryView class. + + Some of these do make GET Request objects, as the helper methods + expect take the whole request as input, they do not call + CloudRecordSummaryView.get() however. 
+ """ + + def setUp(self): + """Prevent logging from appearing in test output.""" + logging.disable(logging.CRITICAL) + + def test_parse_query_parameters(self): + """Test the parsing of query parameters.""" + # test a get with group, summary, start, end and user + test_cloud_view = CloudRecordSummaryView() + factory = APIRequestFactory() + url = ''.join((reverse('CloudRecordSummaryView'), + '?group=Group1', + '&service=Service1', + '&from=FromDate', + '&to=ToDate', + '&user=UserA')) + + request = factory.get(url) + + parsed_responses = test_cloud_view._parse_query_parameters(request) + self.assertEqual(parsed_responses, + ("Group1", "Service1", "FromDate", + "ToDate", "UserA")) + + # test a get with just an end date + url = ''.join((reverse('CloudRecordSummaryView'), + '?to=ToDate')) + + request = factory.get(url) + parsed_responses = test_cloud_view._parse_query_parameters(request) + self.assertEqual(parsed_responses, + (None, None, None, + "ToDate", None)) + + def test_paginate_result(self): + """Test an empty result is paginated correctly.""" + # test when no page is given. 
+ test_cloud_view = CloudRecordSummaryView() + content = test_cloud_view._paginate_result(None, []) + expected_content = {'count': 0, + 'previous': None, + u'results': [], + 'next': None} + + self.assertEqual(content, expected_content) + + # test when page number is incorrect/invalid + factory = APIRequestFactory() + + # test when page number is not a number + url = ''.join((reverse('CloudRecordSummaryView'), + '?page=a')) + + request = factory.get(url) + content = test_cloud_view._paginate_result(request, []) + self.assertEqual(content, expected_content) + + # test when page number is out of bounds + url = ''.join((reverse('CloudRecordSummaryView'), + '?page=9999')) + + request = factory.get(url) + content = test_cloud_view._paginate_result(request, []) + self.assertEqual(content, expected_content) + + def test_request_to_token(self): + """Test a token can be extracted from request.""" + test_cloud_view = CloudRecordSummaryView() + factory = APIRequestFactory() + url = ''.join((reverse('CloudRecordSummaryView'), + '?from=FromDate')) + + request = factory.get(url, HTTP_AUTHORIZATION='Bearer ThisIsAToken') + + token = test_cloud_view._request_to_token(request) + self.assertEqual(token, 'ThisIsAToken') + + def test_request_to_token_fail(self): + """Test the response of a tokenless request.""" + test_client = Client() + + url = ''.join((reverse('CloudRecordSummaryView'), + '?from=FromDate')) + + response = test_client.get(url) + + self.assertEqual(response.status_code, 401) + + def test_is_client_authorized(self): + """Test a example client is authorised.""" + test_cloud_view = CloudRecordSummaryView() + with self.settings(ALLOWED_FOR_GET='IAmAllowed'): + self.assertTrue( + test_cloud_view._is_client_authorized( + 'IAmAllowed')) + + def test_is_client_authorized_fail(self): + """Test the failure of un-authorised clients.""" + test_cloud_view = CloudRecordSummaryView() + with self.settings(ALLOWED_FOR_GET='IAmAllowed'): + self.assertFalse( + 
test_cloud_view._is_client_authorized( + 'IAmNotAllowed')) + + def test_filter_cursor(self): + """Test the filtering of a query object based on settings.""" + test_cloud_view = CloudRecordSummaryView() + + # A list of test summaries. + test_data = [{'Day': 30, + 'Month': 7, + 'Year': 2016, + 'SiteName': 'TEST'}] + + cursor = Mock() + # Get the mock cursor object return the test_data. + cursor.fetchall = Mock(return_value=test_data) + + with self.settings(RETURN_HEADERS=['SiteName', 'Day']): + result = test_cloud_view._filter_cursor(cursor) + + expected_result = [{'SiteName': 'TEST', + 'Day': 30}] + + self.assertEqual(result, expected_result) + + def tearDown(self): + """Delete any messages under QPATH and re-enable logging.INFO.""" + logging.disable(logging.NOTSET) From a9142e61ab30db0d543845f6b23d91e4ab8a84cf Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 14 Jun 2017 15:47:13 +0100 Subject: [PATCH 06/87] Remove test_request_to_token_fail - the functionality of making a GET request with a malformed/missing AuthN header is already tested in api/tests/test_cloud_record_summary_get.py - new functionality has been added to test_request_to_token to unittest the underlying method for the above cases --- api/tests/test_cloud_record_summary_helper.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/api/tests/test_cloud_record_summary_helper.py b/api/tests/test_cloud_record_summary_helper.py index 9780cc8..3ccd9b6 100644 --- a/api/tests/test_cloud_record_summary_helper.py +++ b/api/tests/test_cloud_record_summary_helper.py @@ -1,11 +1,10 @@ """This module tests the helper methods of the CloudRecordView class.""" import logging -import MySQLdb from api.views.CloudRecordSummaryView import CloudRecordSummaryView from django.core.urlresolvers import reverse -from django.test import Client, TestCase +from django.test import TestCase from mock import Mock from rest_framework.test import APIRequestFactory @@ -92,21 +91,22 @@ def 
test_request_to_token(self): url = ''.join((reverse('CloudRecordSummaryView'), '?from=FromDate')) + # Test we can extract the token from a + # normaly structured Authorization header request = factory.get(url, HTTP_AUTHORIZATION='Bearer ThisIsAToken') - token = test_cloud_view._request_to_token(request) self.assertEqual(token, 'ThisIsAToken') - def test_request_to_token_fail(self): - """Test the response of a tokenless request.""" - test_client = Client() - - url = ''.join((reverse('CloudRecordSummaryView'), - '?from=FromDate')) - - response = test_client.get(url) + # Test we return None when failing to extract the token + # from a malformed Authorization header + request = factory.get(url, HTTP_AUTHORIZATION='ThisIsAToken') + token = test_cloud_view._request_to_token(request) + self.assertEqual(token, None) - self.assertEqual(response.status_code, 401) + # Test we return None when no token provided + request = factory.get(url) + token = test_cloud_view._request_to_token(request) + self.assertEqual(token, None) def test_is_client_authorized(self): """Test a example client is authorised.""" From 0b813c252fdf5351d278615008e02e750a461022 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 15 Jun 2017 08:54:15 +0100 Subject: [PATCH 07/87] Refactor tests to use helper GET method --- api/tests/test_cloud_record_summary_get.py | 129 ++++++++++----------- 1 file changed, 60 insertions(+), 69 deletions(-) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index 827fbae..1f21c23 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -36,18 +36,11 @@ def test_cloud_record_summary_get_IAM_fail(self): "Day", "Month", "Year"]): - - test_client = Client() - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=TestGroup', - '&from=20000101', - '&to=20191231')) - - response = test_client.get(url, - HTTP_AUTHORIZATION="Bearer TestToken") - - # Check the expected response 
code has been received. - self.assertEqual(response.status_code, 401) + # Make (and check) the GET request + self._check_summary_get(401, + options=("?group=TestGroup" + "&from=20000101&to=20191231"), + authZ_header_cont="Bearer TestToken") def test_cloud_record_summary_get_400(self): """Test a GET request without the from field.""" @@ -61,15 +54,10 @@ def test_cloud_record_summary_get_400(self): "Day", "Month", "Year"]): - test_client = Client() - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=TestGroup')) - response = test_client.get(url, - HTTP_AUTHORIZATION="Bearer TestToken") - - # Check the expected response code has been received. - self.assertEqual(response.status_code, 400) + # Make (and check) the GET request + self._check_summary_get(400, options="?group=TestGroup", + authZ_header_cont="Bearer TestToken") def test_cloud_record_summary_get_403(self): """Test an unauthorized GET request.""" @@ -83,38 +71,26 @@ def test_cloud_record_summary_get_403(self): "Day", "Month", "Year"]): - test_client = Client() - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=TestGroup', - '&from=20000101', - '&to=20191231')) - - response = test_client.get(url, - HTTP_AUTHORIZATION="Bearer TestToken") - - # Check the expected response code has been received. - self.assertEqual(response.status_code, 403) + # Make (and check) the GET request + self._check_summary_get(403, + options=("?group=TestGroup" + "&from=20000101&to=20191231"), + authZ_header_cont="Bearer TestToken") def test_cloud_record_summary_get_401(self): """Test an unauthenticated GET request.""" - test_client = Client() # Test without the HTTP_AUTHORIZATION header - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=TestGroup', - '&from=20000101', - '&to=20191231')) - - response = test_client.get(url) - - # Check the expected response code has been received. 
- self.assertEqual(response.status_code, 401) + # Make (and check) the GET request + self._check_summary_get(401, + options=("?group=TestGroup" + "&from=20000101&to=20191231")) # Test with a malformed HTTP_AUTHORIZATION header - response = test_client.get(url, - HTTP_AUTHORIZATION='TestToken') - - # Check the expected response code has been received. - self.assertEqual(response.status_code, 401) + # Make (and check) the GET request + self._check_summary_get(401, + options=("?group=TestGroup" + "&from=20000101&to=20191231"), + authZ_header_cont="TestToken") def test_cloud_record_summary_get_200(self): """Test a successful GET request.""" @@ -128,20 +104,6 @@ def test_cloud_record_summary_get_200(self): # Mock the functionality of the IAM CloudRecordSummaryView._token_to_id = Mock(return_value="TestService") - with self.settings(ALLOWED_FOR_GET='TestService', - RETURN_HEADERS=["WallDuration", - "Day", - "Month", - "Year"]): - test_client = Client() - url = ''.join((reverse('CloudRecordSummaryView'), - '?group=TestGroup', - '&from=20000101', - '&to=20191231')) - - response = test_client.get(url, - HTTP_AUTHORIZATION="Bearer TestToken") - expected_response = ('{' '"count":2,' '"next":null,' @@ -157,20 +119,49 @@ def test_cloud_record_summary_get_200(self): '"Day":31,' '"Month":7}]}') - try: - # Check the expected response code has been received. - self.assertEqual(response.status_code, 200) - # Check the response received is as expected. - self.assertEqual(response.content, expected_response) - # Clean up after test. - finally: - self._clear_database(database) - database.close() + with self.settings(ALLOWED_FOR_GET='TestService', + RETURN_HEADERS=["WallDuration", + "Day", + "Month", + "Year"]): + try: + self._check_summary_get(200, + expected_response=expected_response, + options=("?group=TestGroup" + "&from=20000101&to=20191231"), + authZ_header_cont="Bearer TestToken") + finally: + # Clean up after test. 
+ self._clear_database(database) + database.close() def tearDown(self): """Delete any messages under QPATH and re-enable logging.INFO.""" logging.disable(logging.NOTSET) + def _check_summary_get(self, expected_status, expected_response=None, + options='', authZ_header_cont=None): + """Helper method to make a GET request.""" + test_client = Client() + # Form the URL to make the GET request to + url = ''.join((reverse('CloudRecordSummaryView'), options)) + + # If content for a HTTP_AUTHORIZATION has been provided, + # make the GET request with the appropriate header + if authZ_header_cont is not None: + response = test_client.get(url, + HTTP_AUTHORIZATION=authZ_header_cont) + # Otherise, make a GET request without a HTTP_AUTHORIZATION header + else: + response = test_client.get(url) + + # Check the expected response code has been received. + self.assertEqual(response.status_code, expected_status) + + if expected_response is not None: + # Check the response received is as expected. + self.assertEqual(response.content, expected_response) + def _populate_database(self, database): """Populate the database with example summaries.""" cursor = database.cursor() From 3ab33e706c13a9ea9f23d703ce674d250866eb61 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 15 Jun 2017 09:04:18 +0100 Subject: [PATCH 08/87] Remove unneeded 'with... RETURN_HEADERS...' 
- as tests that dont return data dont need to specify what to return --- api/tests/test_cloud_record_summary_get.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index 1f21c23..3d64fa8 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -31,11 +31,7 @@ def test_cloud_record_summary_get_IAM_fail(self): # Simulates a failure to translate a token to an ID CloudRecordSummaryView._token_to_id = Mock(return_value=None) - with self.settings(ALLOWED_FOR_GET='TestService', - RETURN_HEADERS=["WallDuration", - "Day", - "Month", - "Year"]): + with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request self._check_summary_get(401, options=("?group=TestGroup" @@ -49,12 +45,7 @@ def test_cloud_record_summary_get_400(self): # Used in the underlying GET method CloudRecordSummaryView._token_to_id = Mock(return_value="TestService") - with self.settings(ALLOWED_FOR_GET='TestService', - RETURN_HEADERS=["WallDuration", - "Day", - "Month", - "Year"]): - + with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request self._check_summary_get(400, options="?group=TestGroup", authZ_header_cont="Bearer TestToken") @@ -66,11 +57,7 @@ def test_cloud_record_summary_get_403(self): # Used in the underlying GET method CloudRecordSummaryView._token_to_id = Mock(return_value="FakeService") - with self.settings(ALLOWED_FOR_GET='TestService', - RETURN_HEADERS=["WallDuration", - "Day", - "Month", - "Year"]): + with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request self._check_summary_get(403, options=("?group=TestGroup" From afa3d690405f952bc053188b08ae00566e186bc1 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 4 Sep 2017 16:48:51 +0100 Subject: [PATCH 09/87] Re-order if/else and comments - to improve readability --- 
api/tests/test_cloud_record_summary_get.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index 3d64fa8..cc79ea7 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -133,13 +133,13 @@ def _check_summary_get(self, expected_status, expected_response=None, # Form the URL to make the GET request to url = ''.join((reverse('CloudRecordSummaryView'), options)) - # If content for a HTTP_AUTHORIZATION has been provided, - # make the GET request with the appropriate header if authZ_header_cont is not None: + # If content for a HTTP_AUTHORIZATION has been provided, + # make the GET request with the appropriate header response = test_client.get(url, HTTP_AUTHORIZATION=authZ_header_cont) - # Otherise, make a GET request without a HTTP_AUTHORIZATION header else: + # Otherise, make a GET request without a HTTP_AUTHORIZATION header response = test_client.get(url) # Check the expected response code has been received. 
From 7c84e5d76e0e2b1302c026f1feb9e35119247d2b Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 6 Sep 2017 10:28:07 +0100 Subject: [PATCH 10/87] Upgrade Docker image to centos7 --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 74958ed..4d74c1a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ -FROM centos:6 +FROM centos:7 MAINTAINER APEL Administrator # Add EPEL repo so we can get pip -RUN rpm -ivh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm +RUN rpm -ivh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-10.noarch.rpm # Add IGTF trust repo, so it can be installed RUN touch /etc/yum.repos.d/EGI-trustanchors.repo From 9cce557729acba22395b52b3d2ce2a9458d5cbff Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 6 Sep 2017 10:48:00 +0100 Subject: [PATCH 11/87] Fix errors caused by systemd not being present - can't start 'fetch-crl-cron' service so need to modify the cron to not check if that service is running - use bin files to start httpd, atd and crond --- docker/run_on_entry.sh | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/docker/run_on_entry.sh b/docker/run_on_entry.sh index a7007f6..7c26888 100755 --- a/docker/run_on_entry.sh +++ b/docker/run_on_entry.sh @@ -28,17 +28,21 @@ sed -i "s|\['allowed_for_get'\]|$ALLOWED_FOR_GET|g" /var/www/html/apel_rest/sett # fetch the crl first fetch-crl -# then start the periodic fetch-url -service fetch-crl-cron start + +# alter the fetch-crl cron to run regardless of any services +echo "# Cron job running by default every 6 hours, at 45 minutes past the hour +# with +/- 3 minutes sleep. 
+ +45 */6 * * * root /usr/sbin/fetch-crl -q -r 360" > /etc/cron.d/fetch-crl # start apache -service httpd start +/usr/sbin/httpd # start cron -service crond start +/usr/sbin/crond # start at -service atd start +/usr/sbin/atd # install IGTF trust bundle 10 minutes after start up echo "yum -y update ca-policy-egi-core >> /var/log/IGTF-startup-update.log" | at now + 10 min From 1bb7e4c07ee6708a3e7adebdd646858e226e4481 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 6 Sep 2017 11:24:24 +0100 Subject: [PATCH 12/87] Fix errors caused by httpd upgrade - the move to centos 7 means a move to httpd 2.4 - ssl is already loaded by httpd so don't need to load it again - 'Require all granted' is now the prefered syntax over 'Allow from all' and using it removes superfluous output in the logs - by default, DNs are now handled as comma seperated fields, rather than slash we use legacy slash seperated DNs to prevent re writting of the software --- conf/apel_rest_api.conf | 4 ++-- conf/ssl.conf | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/conf/apel_rest_api.conf b/conf/apel_rest_api.conf index 2a7db5c..f3b7eb9 100644 --- a/conf/apel_rest_api.conf +++ b/conf/apel_rest_api.conf @@ -5,7 +5,7 @@ RewriteRule .* - [E=HTTP_AUTHORIZATION:%{HTTP:Authorization},L] -Allow from all +Require all granted @@ -17,5 +17,5 @@ WSGIPassAuthorization On Alias /static "/usr/lib/python2.6/site-packages/rest_framework/static" -Allow from all +Require all granted diff --git a/conf/ssl.conf b/conf/ssl.conf index f117580..6e4e178 100644 --- a/conf/ssl.conf +++ b/conf/ssl.conf @@ -9,7 +9,7 @@ # consult the online docs. You have been warned. # -LoadModule ssl_module modules/mod_ssl.so +# LoadModule ssl_module modules/mod_ssl.so # # When we also provide SSL we have to listen to the @@ -39,7 +39,7 @@ SSLSessionCacheTimeout 300 # Semaphore: # Configure the path to the mutual exclusion semaphore the # SSL engine uses internally for inter-process synchronization. 
-SSLMutex default +Mutex default # Pseudo Random Number Generator (PRNG): # Configure one or more sources to seed the PRNG of the @@ -174,7 +174,7 @@ SSLVerifyDepth 10 # o OptRenegotiate: # This enables optimized SSL connection renegotiation handling when SSL # directives are used in per-directory context. -SSLOptions +StdEnvVars +FakeBasicAuth +ExportCertData +StrictRequire +SSLOptions +StdEnvVars +FakeBasicAuth +ExportCertData +StrictRequire +LegacyDNStringFormat SSLOptions +StdEnvVars From c12a4fe3f3e87d4b8a9587d0da9a95946d6c0602 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 6 Sep 2017 11:39:14 +0100 Subject: [PATCH 13/87] Change reference to old version of python - as centos7 comes default with python2.7 and the 'static' files are stored under the python site-packages directory --- conf/apel_rest_api.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conf/apel_rest_api.conf b/conf/apel_rest_api.conf index f3b7eb9..c6deda1 100644 --- a/conf/apel_rest_api.conf +++ b/conf/apel_rest_api.conf @@ -15,7 +15,7 @@ WSGIProcessGroup apel_rest WSGIScriptAlias / /var/www/html/apel_rest/wsgi.py WSGIPassAuthorization On -Alias /static "/usr/lib/python2.6/site-packages/rest_framework/static" - +Alias /static "/usr/lib/python2.7/site-packages/rest_framework/static" + Require all granted From e3e5ed2b22603dfc7ec23e18b6bef85de9e8840e Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:50:56 +0100 Subject: [PATCH 14/87] Add comment why ssl module is not loaded here --- conf/ssl.conf | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conf/ssl.conf b/conf/ssl.conf index 6e4e178..2ec4666 100644 --- a/conf/ssl.conf +++ b/conf/ssl.conf @@ -9,6 +9,8 @@ # consult the online docs. You have been warned. # +# This is already loaded by httpd 2.4, +# so doesn't need re-loading here. 
# LoadModule ssl_module modules/mod_ssl.so # From a6444583adacc1b5f55bbc82c16140bf5b0ca0b3 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 10 Apr 2017 09:44:18 +0100 Subject: [PATCH 15/87] Add inital TokenChecker to validate tokens --- api/utils/TokenChecker.py | 51 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 api/utils/TokenChecker.py diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py new file mode 100644 index 0000000..ee6f80b --- /dev/null +++ b/api/utils/TokenChecker.py @@ -0,0 +1,51 @@ +"""This module contains the TokenChecker class.""" +import datetime +import logging + +from jose import jwt + + +class TokenChecker: + """This class contains methods to check a JWT token for validity.""" + + def __init__(self): + """Initialize a new TokenChecker.""" + self.logger = logging.getLogger(__name__) + # cache is used to maintain an dicitonary of issuer/cache cut off pairs + self.cache = {} + + def is_token_valid(self, token): + """Introspect a token to determine it's origin.""" + jwt_unverified_json = jwt.get_unverified_claims(token) + + if not self._is_token_json_temporally_valid(jwt_unverified_json): + return False + + return True + + def _is_token_json_temporally_valid(self, token_json): + """ + Check JWT Token JSON is temporarily valid. 
+ + Return True if: + - Both 'Issued At' (iat) and Expired' (exp) are present + - iat is in the past + - exp is in the future + """ + now = int(datetime.datetime.now().strftime('%s')) + try: + # check 'iat' (issued at) is in the past and 'exp' (expires at) + # is in the future + if token_json['iat'] > now or token_json['exp'] < now: + self.logger.info("Token 'iat' or 'exp' invalid") + self.logger.debug(token_json) + self.logger.debug("Time now: %s", now) + return False + # it's possible a token doesn't have an 'iat' or 'exp' + except KeyError: + self.logger.info("Token missing 'iat' or 'exp'") + self.logger.debug(token_json) + return False + + # if we get here, the token is temporarily valid + return True From d9991cc218c44458098a34b4b182d9c533452fde Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 10 Apr 2017 11:52:31 +0100 Subject: [PATCH 16/87] Add JWT library as a requirement - as it's need to introspect tokens --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index aa533d9..57e790e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ dirq django==1.6.3 djangorestframework==3.0.5 +python-jose MySQL-python From 37891efb36205bb6f0e6ad4000901ae07afe210c Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 10 Apr 2017 11:58:03 +0100 Subject: [PATCH 17/87] Add __init__.py - so TokenChecker is importable --- api/utils/__init__.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 api/utils/__init__.py diff --git a/api/utils/__init__.py b/api/utils/__init__.py new file mode 100644 index 0000000..8e9fd7c --- /dev/null +++ b/api/utils/__init__.py @@ -0,0 +1,18 @@ +""" +Copyright (C) 2016 STFC. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +@author Greg Corbett +""" +__version__ = '1, 3, 0' From a7f525c3abc42fee071cc6172c4b57101f1ecce0 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 10 Apr 2017 12:54:37 +0100 Subject: [PATCH 18/87] Add test case for temporal validity check --- api/tests/test_token_checker.py | 126 ++++++++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 api/tests/test_token_checker.py diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py new file mode 100644 index 0000000..f0391c0 --- /dev/null +++ b/api/tests/test_token_checker.py @@ -0,0 +1,126 @@ +"""This module tests the JSON Web Token validation.""" + +import logging +import unittest +import time + +from jose import jwt + +from api.utils.TokenChecker import TokenChecker + + +# Using unittest and not django.test as no need for overhead of database +class TokenCheckerTest(unittest.TestCase): + """Tests the JSON Web Token validation.""" + + def setUp(self): + """Create a new TokenChecker and disable logging.""" + self._token_checker = TokenChecker() + logging.disable(logging.CRITICAL) + + def tearDown(self): + """Re-enable logging.""" + logging.disable(logging.NOTSET) + + def test_is_token_json_temporally_valid(self): + """ + Check that temporally invalid payload/token is detected. 
+ + Both by: + - _is_token_json_temporally_valid + - is_token_valid + + The first method checks the temporal validity of the payload + The second method detemines wether the token is invalid + """ + payload_list = [] + + # Add a payload wihtout 'iat' or 'exp' to the payload list + # to test we reject these (as we are choosing to) + payload_list.append({ + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '714892f5-014f-43ad-bea0-fa47579db222'}) + + # Add a payload without 'exp' to the payload_list + # to test we reject these (as we are choosing to) + payload_list.append({ + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36'}) + + # Add a payload without 'iat' + # to test we reject these (as we are choosing to) + payload_list.append({ + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000}) + + # Add a payload with an 'iat' and 'exp' in the past + # (e.g. 
they have expired) to test we are + # rejecting these + payload_list.append({ + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) - 200000}) + + # Add a payload with an 'iat' and 'exp' in the future + # to test we are rejecting these (as we should as they + # are not yet valid) + payload_list.append({ + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) + 200000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 2000000}) + + for payload in payload_list: + # Assert the underlying helper method reponsible for + # checking temporal validity returns False when passed + # temporally invalid payloads + self.assertFalse( + self._token_checker._is_token_json_temporally_valid(payload), + "Payload %s should not be accepted!" % payload) + + # Assert the wrapper method is_token_valid reutrns + # False when passed temporally invalid tokens + token = self._create_token(payload, PRIVATE_KEY) + self.assertFalse( + self._token_checker.is_token_valid(token), + "Token with payload %s should not be accepted!" 
% payload) + + def _create_token(self, payload, key): + """Return a token, signed by key, correspond to the payload.""" + return jwt.encode(payload, key, algorithm='RS256') + +PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAqxAx1H7MabcEYhis3SJoaA3tq6wUgzKzv4c16nAW4yT21P8O +lL9qKYkzWuJWWiI90ecEHONEjDI+dFfaj/bK2O0jDT1NqVZbn2kW3sXaqUs4lUIg +5iPXysknitQjQsO1AmLZXFMNSPCKhBpMPxqG9vBMSxVMIXxXMZXeFpFIOqHFXgtq ++KmktwB2Aj/91NlSSj+Lw7bVSaZZNok/kuN/q43A6LS9uRHCQy9aeU0G8rZoqFSf +F6LypFBN8iZxaw8zlUKy2NYpu6opNUMhTxP7JmEy6yr4kMY7LUNRAKoP4tpgwwgt +hnecyprGGr93vh2qifP+bV3J3oa+ub1+jql63QIBIwKCAQAJxmk/V7PoyKEqLUu0 +3WUNQp/d7JN1NhjmX39se28Fqlc/XwglwcuNWEwT0mtVm46BBeL6ViEslSgj58NY +rwRG6PqwTKVaIjEfDVHDlkcCXBHcpLFsPI/89Y07IhCkurni4RO8IgDCVuNYAYCz +JhZXQO5qsMKFkhOcbva/dgQgm2+yX6i1lFYNstpdr8ODBhiT6Tn7B5CONbLICJFd +7SdVAORFgdOvRLHkLPcL4I6x0hautCvEf2x47kRaLGtPMsFJQSZYl+Z81whwrJTM +zGTLH4kM6qHlIhABYhqME5bCVzHYmvXW+uIgVLznfIzQFyewRdMJZzCp0XxqjOkX +yixLAoGBANbKpZVf2giL26RpUsZ8waIFc7tQAzWSqwF384XPFn8tdN1DBT3R2rQk +8gnjX07Z6YrxkEvAhb7hDgB09EGPx1nDEXnFk1Y2xXePBccdSVQvjc/ExX0YGtBi +ZCbiJW5+ORx8olxkNiEVUvik62470fOkWtrwO6qq77lc5QQVKlopAoGBAMvh280v +K7o7auQxaVnjLQIoWlnKrz3+T58R/8nYFtAuiUjlT4dsBOUFKm1GE/bw8FDFc1Vo +Z0l++KFTKNnxT6NQPRoE4JH8MZ3ycS4x0cMUK4TEW2pO11KyqlmLLdMbq1v3zgLw +GwZ/fOOi0GlItoBY0zZYlEuXxQQUNotZLRmVAoGBAMRhgXKgx1hFWxn55UfChSZr +Yn9fGOCGGLDissN7gkhkExNwedIe89fnQ7FE6Wyp+hiiWAq+pircZJK0EoUVvZPl +jFITuehsl0i9RxxyjC+2cwcaTik6nCw8s1bAIjki8mMwH2pqQB4/Yc1jlWwZb3+s +Nc98jlLlbXZGTbqXAiaLAoGBAIvOExBa3CfuOqsakWI1YLEFukwzNlZlPelrbZG4 +vy+q4cuV7WQs0CgDis6WdBcLnXk28AANE6AcjTtsOUT9PexURyfI1IFcectkat3Z +BN2KLHhMIW135BtzMvuSoxRqvqV2uSaWA+cy2Uui2A2uNAA86JpLXl+4hxi9Zzr7 +UiArAoGBALrLwrhYtwOhMoJPK+XlMTJpLFSOUiGcFg06cvDtsUCz1H0Ma1TuHNSJ +/El54J1bGpJ/h212wB+gAHE7nRNJfFn5vPJtqwMv/SW675SA4mlKi2xoBO1NW/sw +FIusZ178P2e/lc+1QJoKkrM7ZKxnDvNj2Lt3B3JmXunXWZpn4i8Q +-----END RSA PRIVATE KEY-----""" From fb31a0ccb9cdc73f0b242bad79096fcc434751af Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 
12 Apr 2017 10:44:01 +0100 Subject: [PATCH 19/87] Change approved IAM to a list of approved IAMs - so that tokens from multiple IAMs could be accepted. --- apel_rest/settings.py | 3 ++- docker/run_on_entry.sh | 2 +- yaml/apel_rest_interface.env | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/apel_rest/settings.py b/apel_rest/settings.py index 4025c86..9a3f0b0 100644 --- a/apel_rest/settings.py +++ b/apel_rest/settings.py @@ -163,7 +163,8 @@ PROVIDERS_URL = 'provider_url' -IAM_URL = 'iam_url' +# The Hostnames of IAMs that can issue access tokens for the REST interface +IAM_HOSTNAME_LIST = ['allowed_iams'] SERVER_IAM_ID = 'server_iam_id' SERVER_IAM_SECRET = 'server_iam_secret' diff --git a/docker/run_on_entry.sh b/docker/run_on_entry.sh index 7c26888..a84bfad 100755 --- a/docker/run_on_entry.sh +++ b/docker/run_on_entry.sh @@ -8,7 +8,7 @@ sed -i "s|not_a_secure_secret|$DJANGO_SECRET_KEY|g" /var/www/html/apel_rest/sett sed -i "s|provider_url|$PROVIDERS_URL|g" /var/www/html/apel_rest/settings.py # IAM_URL -sed -i "s|iam_url|$IAM_URL|g" /var/www/html/apel_rest/settings.py +sed -i "s|\['allowed_iams'\]|$IAM_URLS|g" /var/www/html/apel_rest/settings.py # SERVER_IAM_ID sed -i "s|server_iam_id|$SERVER_IAM_ID|g" /var/www/html/apel_rest/settings.py diff --git a/yaml/apel_rest_interface.env b/yaml/apel_rest_interface.env index b89b9a4..e2215f9 100644 --- a/yaml/apel_rest_interface.env +++ b/yaml/apel_rest_interface.env @@ -4,7 +4,7 @@ DJANGO_SECRET_KEY=Put_a_secret_here PROVIDERS_URL=http://indigo.cloud.plgrid.pl/cmdb/service/list # The introspect URL for the IAM repsonsible for token based authN/authZ -IAM_URL=https://iam-test.indigo-datacloud.eu/introspect +IAM_URLS=[\'iam-test.indigo-datacloud.eu\'] # The ID and secret of this service as registered with the sbove IAM SERVER_IAM_ID= SERVER_IAM_SECRET= From 7c552691f3e28b75b20656835f57562f24e276cb Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 12 Apr 2017 10:47:46 +0100 Subject: [PATCH 20/87] Add 
more funcitonality to TokenChecker - add a primitive cache - add methods to verify the token using the issuers public key --- api/utils/TokenChecker.py | 80 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 79 insertions(+), 1 deletion(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index ee6f80b..7f9f6af 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -1,18 +1,24 @@ """This module contains the TokenChecker class.""" import datetime +import httplib import logging +import socket +from django.conf import settings from jose import jwt +from jose.exceptions import ExpiredSignatureError, JWTClaimsError, JWTError class TokenChecker: """This class contains methods to check a JWT token for validity.""" - def __init__(self): + def __init__(self, cert, key): """Initialize a new TokenChecker.""" self.logger = logging.getLogger(__name__) # cache is used to maintain an dicitonary of issuer/cache cut off pairs self.cache = {} + self._cert = cert + self._key = key def is_token_valid(self, token): """Introspect a token to determine it's origin.""" @@ -21,8 +27,80 @@ def is_token_valid(self, token): if not self._is_token_json_temporally_valid(jwt_unverified_json): return False + if not self._is_token_issuer_trusted(jwt_unverified_json): + return False + + # we can pass jwt_unverified_json['iss'] because the previous 'if' + # statement returns if jwt_unverified_json['iss'] is missing. + if not self._verify_token(token, jwt_unverified_json['iss']): + return False + + return True + + def _verify_token(self, token, issuer): + """.""" + if "https://" not in issuer: + self.logger.info('Issuer not https! Will not verify token!') + return False + + # extract the IAM hostname from the issuer + hostname = issuer.replace("https://", "").replace("/", "") + + # get the IAM's public key + key_json = self._get_issuer_public_key(hostname) + + # if we couldn't get the IAM public key, we cannot verify the token. 
+ if key_json is None: + self.logger.info('No IAM Key found. Cannot verfiy token.') + return False + + try: + jwt.decode(token, key_json) + except (ExpiredSignatureError, JWTClaimsError, JWTError) as err: + self.logger.error('Could not validate token: %s, %s', + type(err), str(err)) + return False + return True + def _get_issuer_public_key(self, hostname): + """Return the public key of an IAM Hostname.""" + try: + conn = httplib.HTTPSConnection(hostname, + cert_file=self._cert, + key_file=self._key) + + conn.request('GET', '/jwk', {}, {}) + return conn.getresponse().read() + + except socket.gaierror as e: + slef.logger.info('socket.gaierror: %s, %s', + e.errno, e.strerror) + + return None + + def _is_token_issuer_trusted(self, token_json): + """ + Return True if the payload 'issuer' is in settings.IAM_HOSTNAME_LIST. + + Otherwise (or if 'iss' missng) return False. + """ + try: + issuer = token_json['iss'] + except KeyError: + self.logger.info("Token missing 'iss'") + self.logger.debug(token_json) + return False + + if issuer in settings.IAM_HOSTNAME_LIST: + self.logger.info("Token 'iss' from approved IAM") + self.logger.debug(token_json) + return True + else: + self.logger.info("Token 'iss' not from approved IAM") + self.logger.debug(token_json) + return False + def _is_token_json_temporally_valid(self, token_json): """ Check JWT Token JSON is temporarily valid. 
From a29b5c1b8aa61288fa8a2875472c1a71331b7ef0 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 09:49:18 +0100 Subject: [PATCH 21/87] Add test cases for new TokenChecker functionality - add test for valid tokens - add test for whether a token from untrusted issuer is rejected --- api/tests/test_token_checker.py | 158 +++++++++++++++++++++++++++++++- api/utils/TokenChecker.py | 2 +- 2 files changed, 157 insertions(+), 3 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index f0391c0..c6a9576 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -5,23 +5,132 @@ import time from jose import jwt +from django.test import TestCase +from mock import patch from api.utils.TokenChecker import TokenChecker # Using unittest and not django.test as no need for overhead of database -class TokenCheckerTest(unittest.TestCase): +class TokenCheckerTest(TestCase): """Tests the JSON Web Token validation.""" def setUp(self): """Create a new TokenChecker and disable logging.""" - self._token_checker = TokenChecker() + self._token_checker = TokenChecker(None, None) logging.disable(logging.CRITICAL) def tearDown(self): """Re-enable logging.""" logging.disable(logging.NOTSET) + @patch.object(TokenChecker, '_get_issuer_public_key') + def test_valid_token(self, mock_get_issuer_public_key): + """Check a valid and properly signed token is accepted.""" + # Mock the external call to retrieve the IAM public key + # used in the _verify_token and is_token_valid call + mock_get_issuer_public_key.return_value = PUBLIC_KEY + + payload_list = [] + + # This payload will be valid as we will sign it with PRIVATE_KEY + payload = { + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000} + + token = self._create_token(payload, PRIVATE_KEY) + + with 
self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + self.assertTrue( + self._token_checker.is_token_valid(token), + "Token with payload %s should be accepted!" % payload) + + @patch.object(TokenChecker, '_get_issuer_public_key') + def test_verify_token(self, mock_get_issuer_public_key): + """ + Check a mis-signed/'forged' token is detected. + + Both by: + - _verify_token + - is_token_valid + + The first method checks wether the key is properly signed + The second method detemines wether the token is invalid + """ + # Mock the external call to retrieve the IAM public key + # used in the _verify_token and is_token_valid call + mock_get_issuer_public_key.return_value = PUBLIC_KEY + + payload_list = [] + + # This payload would be valid if properly signed, but we are going to + # sign it with FORGED_PRIVATE_KEY which will not match the PUBLIC_KEY + payload_list.append({ + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000}) + + for payload in payload_list: + token = self._create_token(payload, FORGED_PRIVATE_KEY) + with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + self.assertFalse( + self._token_checker._verify_token(token, payload['iss']), + "Payload %s should not be accepted!" % payload) + + self.assertFalse( + self._token_checker.is_token_valid(token), + "Token with payload %s should not be accepted!" % payload) + + def test_is_token_issuer_trusted(self): + """ + Check an untrusted 'issuer' (or missing 'issuer') is detected. + + Both by: + - _is_token_issuer_trusted + - is_token_valid + + The first method checks wether the issuer is + in settings.IAM_HOSTNAME_LIST + The second method detemines wether the token is invalid + """ + payload_list = [] + + # Add a payload without 'iss' field. 
+ # to test we reject these as we cannot + # tell where it came from (so can't verify it) + payload_list.append({ + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000}) + + # Add a payload with a malicious 'iss' field. + # to test we reject these as we do not wantt + # to attempt to verify it + payload_list.append({ + 'iss': 'https://malicious-iam.indigo-datacloud.biz/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000}) + + for payload in payload_list: + token = self._create_token(payload, PRIVATE_KEY) + + with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + self.assertFalse( + self._token_checker._is_token_issuer_trusted(payload), + "Payload %s should not be accepted!" % payload) + + self.assertFalse( + self._token_checker.is_token_valid(token), + "Token with payload %s should not be accepted!" % payload) + def test_is_token_json_temporally_valid(self): """ Check that temporally invalid payload/token is detected. 
@@ -97,6 +206,7 @@ def _create_token(self, payload, key): """Return a token, signed by key, correspond to the payload.""" return jwt.encode(payload, key, algorithm='RS256') +# Used to sign tokens PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAqxAx1H7MabcEYhis3SJoaA3tq6wUgzKzv4c16nAW4yT21P8O lL9qKYkzWuJWWiI90ecEHONEjDI+dFfaj/bK2O0jDT1NqVZbn2kW3sXaqUs4lUIg @@ -124,3 +234,47 @@ def _create_token(self, payload, key): /El54J1bGpJ/h212wB+gAHE7nRNJfFn5vPJtqwMv/SW675SA4mlKi2xoBO1NW/sw FIusZ178P2e/lc+1QJoKkrM7ZKxnDvNj2Lt3B3JmXunXWZpn4i8Q -----END RSA PRIVATE KEY-----""" + +# Used to verify tokens signed with PRIVATE_KEY +PUBLIC_KEY = {"keys": [{"kty": "RSA", + "n": ("qxAx1H7MabcEYhis3SJoaA3tq6wUgzKzv4c" + "16nAW4yT21P8OlL9qKYkzWuJWWiI90ecEHO" + "NEjDI-dFfaj_bK2O0jDT1NqVZbn2kW3sXaq" + "Us4lUIg5iPXysknitQjQsO1AmLZXFMNSPCK" + "hBpMPxqG9vBMSxVMIXxXMZXeFpFIOqHFXgt" + "q-KmktwB2Aj_91NlSSj-Lw7bVSaZZNok_ku" + "N_q43A6LS9uRHCQy9aeU0G8rZoqFSfF6Lyp" + "FBN8iZxaw8zlUKy2NYpu6opNUMhTxP7JmEy" + "6yr4kMY7LUNRAKoP4tpgwwgthnecyprGGr9" + "3vh2qifP-bV3J3oa-ub1-jql63Q"), + "e": "Iw"}]} + +# Used to 'forge' a token that PUBLIC_KEY will not be able to verify +FORGED_PRIVATE_KEY = """-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCxHCAbJMUth6NV +5dz3J6p13NY0deuhST4ped3ogFHvfMlj0ehJMxLVe/B5oooyQkaSyA0yc4eBfYxj +W4tmOGyWZic1wLhQUQpXvdVcmt+f26GewnIvWOI4MSg1C+Qg9E4I/eIlpz0jrkbq +q8k/x4pr/c+X8TNiaiZb2Sjp/SFdpwqZ4Eh8u3PPc/B5/mJfvg0T8mrjZ8kQkPcB +SH+JzbzyFuEpa1OqAWeSOaQniyS2SEdW8DT0/AlQcf3UmvR0Dh5N6KltwWdWdocg +1jsLCYa32/8uiBJ3JM/C+vWtBjFGGoHp4msk9VAUnq9oWA5z5NT7G1lpoAj1Hjuu +MnRM9cGnAgMBAAECggEBAIXzrri428TuxHNwMepgfsU77GqrETbgLXqzKEnz24SV +TcAIf3X1gfYjEiL88ybGB5h2Y7zXshIXAboX/9ulK0OpKVi3VO+yC2+HLTsoC6Bd +PeTUTgZPZHF5hF5yiuz9uZOFaahuz4gQBKTynnh1k9TPl1Xk4Kc7f52SJiarA7RP +In7R/YDW6Wxg8fCID1L2McFxlAdHF94oG1qNe1AriaUKZ9MUKaGxmdxuAk3pA1cC +IqTQYHb1zgJ8oFlhqYVubTL/85ADVV1aY0/UKZcwL1xLJ2xifGvbYPUs5gMJ0w0F 
+zHNdhZ6991/F4Txr9Q3kwZX8uwxFFgWSh2qE2aJno8ECgYEA5OLDb3G6zIpbmAU8 +LbfWt/PgsQAa0XtnCrzyz5SnBjPqbesPrg6VxPVdP2/OW+yBvtxmgo3M5xC7LV8C +x1/In90FXa2KfSXj2OENR/ks55BVdKcI+Jmljmum8AkPvdwb42ztpKgSC8BhItKd +G5Ft1B2t6EJZY2SPL8UbUXtAkgcCgYEAxhcx2zPlC+x7Y0oKidZswR3M2iB56One +3dFabzWRA+P7/YA1VM+VPppSDr8AqGpiv0rLh7xK0R6usjmZ1Z/X4oQDF5uiH0uK +DqsXDF61fFjKClfY4WcUzlcolJ8AD9q50o7bc+hc2WEWxbh/iqfzEWYIa3Y+cuUz +XMOZJux+62ECgYBTFREV6fWBe5OF2hifC8VQHqFn/n69nYqoti95NB9wu/WTkqit +aLPqu5nuhfolGfN6wWwgZbKECWm4LW3Hyzf693KUL4M+rDtJpV95ybQIFjc+0ccK +3lLfIKqHJPLm2vfwlMCqbSunwlxAFK1crWxte5x921+xGXZ0Q5sH97JXjwKBgEGa +HODDZu9z+ckAFE1hvdKW0+jJKJaCHVTIqHJ8AvKO5j0l4IOd24dIBDTt/IHJ+bnw +Q0dIjF6FEsXjXZbpwM07euqumBpVIfuJnbBzDReJMCAMx76eLL3JD59oqNSXU0Lw +HK1eHqG/DZOdbl+1D0KLz+4G0teqIEBwZqAFYmMBAoGBANmBGtkC6APqRe5Dzz9F +z5L9Mt9Krz8EI6s43XA4fYhouw07zGY0816BGa7r772duZkfh/J8kuxWRdvseo5G +y3EDz4+nl+tzxzYvbsSNOK8ceJRHNwJQPZuq166svKGLe6tj65MtfvIUTzWUU9FW +OLxDCvBa2CgAJVfUO1MhtX/L +-----END PRIVATE KEY-----""" diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 7f9f6af..e8a98dd 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -38,7 +38,7 @@ def is_token_valid(self, token): return True def _verify_token(self, token, issuer): - """.""" + """Fetch IAM public key and veifry token against it.""" if "https://" not in issuer: self.logger.info('Issuer not https! 
Will not verify token!') return False From b83c711570f82f1db1b6bee8f975d13c8795b47a Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 09:49:36 +0100 Subject: [PATCH 22/87] Use hostname rather than URL to authZ IAMs - so the settigns variable is a list of hostnames --- api/utils/TokenChecker.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index e8a98dd..f060d4d 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -81,7 +81,7 @@ def _get_issuer_public_key(self, hostname): def _is_token_issuer_trusted(self, token_json): """ - Return True if the payload 'issuer' is in settings.IAM_HOSTNAME_LIST. + Return True if the 'issuer' hostname is in settings.IAM_HOSTNAME_LIST. Otherwise (or if 'iss' missng) return False. """ @@ -92,7 +92,10 @@ def _is_token_issuer_trusted(self, token_json): self.logger.debug(token_json) return False - if issuer in settings.IAM_HOSTNAME_LIST: + # extract the IAM hostname from the issuer + hostname = issuer.replace("https://", "").replace("/", "") + + if hostname in settings.IAM_HOSTNAME_LIST: self.logger.info("Token 'iss' from approved IAM") self.logger.debug(token_json) return True From a326675860d9ce7ffa12a5648e77daf558251854 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 11 Jul 2017 13:00:39 +0100 Subject: [PATCH 23/87] Cache valid tokens and allow cached tokens access --- api/utils/TokenChecker.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index f060d4d..534abdb 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -5,6 +5,7 @@ import socket from django.conf import settings +from django.core.cache import cache from jose import jwt from jose.exceptions import ExpiredSignatureError, JWTClaimsError, JWTError @@ -24,6 +25,12 @@ def is_token_valid(self, token): """Introspect a token to determine it's origin.""" 
jwt_unverified_json = jwt.get_unverified_claims(token) + # if token is in the cache, we say it is valid + if cache.get(jwt_unverified_json['sub']) == token: + self.logger.info("Token is currently in cache. Granting access.") + return True + + # otherwise, we need to validate the token if not self._is_token_json_temporally_valid(jwt_unverified_json): return False @@ -35,6 +42,16 @@ def is_token_valid(self, token): if not self._verify_token(token, jwt_unverified_json['iss']): return False + # if the execution gets here, we can cache the token + # cache is a key: value like structure with an optional timeout + # as we only need the token stored we use that as the key + # and None as the value. + # Cache timeout set to 300 seconds to enable quick revocation + # but also limit the number of requests to the IAM instance + # Caching is also done after token validation to ensure + # only valids tokens are cached + cache.set(jwt_unverified_json['sub'], token, 300) + return True def _verify_token(self, token, issuer): From 5e49e035a80745f48e93aa80e7af0809a8acea20 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 11 Jul 2017 13:07:35 +0100 Subject: [PATCH 24/87] Add tests for cache functionality --- api/tests/test_token_checker.py | 78 +++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index c6a9576..895179b 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -24,6 +24,84 @@ def tearDown(self): """Re-enable logging.""" logging.disable(logging.NOTSET) + @patch.object(TokenChecker, '_get_issuer_public_key') + def test_token_cache(self, mock_get_issuer_public_key): + """ + Check a cached token is granted access. + + Method does this by checking a token is valid twice, the first time + the token is validate and stored in a cache, the second time access + should be granted because the token is in the cache, not because the + token is valid. 
+ """ + # Mock the external call to retrieve the IAM public key + # used in the _verify_token and is_token_valid call + mock_get_issuer_public_key.return_value = PUBLIC_KEY + + payload_list = [] + + # This payload will be valid as we will sign it with PRIVATE_KEY + payload = { + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000} + + # Add the same token twice, this is what tests the cache functionality + payload_list = [payload, payload] + + for payload in payload_list: + token = self._create_token(payload, PRIVATE_KEY) + with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + self.assertTrue( + self._token_checker.is_token_valid(token), + "Token with payload %s should not be accepted!" % payload) + + @patch.object(TokenChecker, '_get_issuer_public_key') + def test_token_cache_mis_matche(self, mock_get_issuer_public_key): + """ + Check tokens with the same subject are handled correctly. + + Having a token cached for the sub should not be sufficent to grant + access, the tokens must match. + """ + # Mock the external call to retrieve the IAM public key + # used in the _verify_token and is_token_valid call + mock_get_issuer_public_key.return_value = PUBLIC_KEY + + payload_list = [] + + # This payload will be valid as we will sign it with PRIVATE_KEY + payload1 = { + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) + 200000} + + # This payload has a subject that will be in the cache, but this + # new token is not. 
We need to ensure this invalid token does not + # get granted rights based only on it's sub being in the cache + payload2 = { + 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'exp': int(time.time()) - 200} + + token1 = self._create_token(payload1, PRIVATE_KEY) + token2 = self._create_token(payload2, PRIVATE_KEY) + + with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + self.assertTrue( + self._token_checker.is_token_valid(token1), + "Token with payload %s should not be accepted!" % payload1) + + self.assertFalse( + self._token_checker.is_token_valid(token2), + "Token with payload %s should not be accepted!" % payload2) + @patch.object(TokenChecker, '_get_issuer_public_key') def test_valid_token(self, mock_get_issuer_public_key): """Check a valid and properly signed token is accepted.""" From 7dcfe37631d9e0f2d982d581fec18b9beb6fb9f6 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 11 Jul 2017 13:11:27 +0100 Subject: [PATCH 25/87] Fix pep8 issues caused by long example urls --- api/tests/test_token_checker.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index 895179b..fdd7741 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -42,7 +42,7 @@ def test_token_cache(self, mock_get_issuer_public_key): # This payload will be valid as we will sign it with PRIVATE_KEY payload = { - 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -53,7 +53,7 @@ def test_token_cache(self, mock_get_issuer_public_key): for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) - with 
self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertTrue( self._token_checker.is_token_valid(token), "Token with payload %s should not be accepted!" % payload) @@ -74,7 +74,7 @@ def test_token_cache_mis_matche(self, mock_get_issuer_public_key): # This payload will be valid as we will sign it with PRIVATE_KEY payload1 = { - 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -84,7 +84,7 @@ def test_token_cache_mis_matche(self, mock_get_issuer_public_key): # new token is not. We need to ensure this invalid token does not # get granted rights based only on it's sub being in the cache payload2 = { - 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -93,14 +93,14 @@ def test_token_cache_mis_matche(self, mock_get_issuer_public_key): token1 = self._create_token(payload1, PRIVATE_KEY) token2 = self._create_token(payload2, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertTrue( self._token_checker.is_token_valid(token1), "Token with payload %s should not be accepted!" % payload1) self.assertFalse( self._token_checker.is_token_valid(token2), - "Token with payload %s should not be accepted!" % payload2) + "Token with payload %s should not be accepted!" 
% payload2) @patch.object(TokenChecker, '_get_issuer_public_key') def test_valid_token(self, mock_get_issuer_public_key): @@ -113,7 +113,7 @@ def test_valid_token(self, mock_get_issuer_public_key): # This payload will be valid as we will sign it with PRIVATE_KEY payload = { - 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -121,7 +121,7 @@ def test_valid_token(self, mock_get_issuer_public_key): token = self._create_token(payload, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertTrue( self._token_checker.is_token_valid(token), "Token with payload %s should be accepted!" % payload) @@ -147,7 +147,7 @@ def test_verify_token(self, mock_get_issuer_public_key): # This payload would be valid if properly signed, but we are going to # sign it with FORGED_PRIVATE_KEY which will not match the PUBLIC_KEY payload_list.append({ - 'iss': 'https://iam-test.indigo-datacloud.eu/', + 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -155,7 +155,7 @@ def test_verify_token(self, mock_get_issuer_public_key): for payload in payload_list: token = self._create_token(payload, FORGED_PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertFalse( self._token_checker._verify_token(token, payload['iss']), "Payload %s should not be accepted!" 
% payload) @@ -191,7 +191,7 @@ def test_is_token_issuer_trusted(self): # to test we reject these as we do not wantt # to attempt to verify it payload_list.append({ - 'iss': 'https://malicious-iam.indigo-datacloud.biz/', + 'iss': 'https://malicious-iam.idc.biz/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', @@ -200,7 +200,7 @@ def test_is_token_issuer_trusted(self): for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.indigo-datacloud.eu'): + with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertFalse( self._token_checker._is_token_issuer_trusted(payload), "Payload %s should not be accepted!" % payload) From c1f5d13fb778968ebbd6cb80e9cd599731722d13 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 11 Jul 2017 15:27:31 +0100 Subject: [PATCH 26/87] Move token_to_id functionality to TokenChecker - as to check if the IAM has revoked the token requires knowing the IAM that issued it, that information is now in the TokenChecker module - alters the logging output slightly so that it is still useful following this move --- api/utils/TokenChecker.py | 73 ++++++++++++++++++++++++----- api/views/CloudRecordSummaryView.py | 43 +++++------------ 2 files changed, 71 insertions(+), 45 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 534abdb..69fc53a 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -1,8 +1,11 @@ """This module contains the TokenChecker class.""" +import base64 import datetime import httplib +import json import logging import socket +import urllib2 from django.conf import settings from django.core.cache import cache @@ -21,26 +24,37 @@ def __init__(self, cert, key): self._cert = cert self._key = key - def is_token_valid(self, token): + def valid_token_to_id(self, token): """Introspect a token to determine it's origin."""
jwt_unverified_json = jwt.get_unverified_claims(token) + unverified_token_id = jwt_unverified_json['sub'] # if token is in the cache, we say it is valid - if cache.get(jwt_unverified_json['sub']) == token: - self.logger.info("Token is currently in cache. Granting access.") - return True + if cache.get(unverified_token_id) == token: + self.logger.info("Token for user %s is in cache." % + unverified_token_id) + + return jwt_unverified_json['sub'] # otherwise, we need to validate the token if not self._is_token_json_temporally_valid(jwt_unverified_json): - return False + return None if not self._is_token_issuer_trusted(jwt_unverified_json): - return False + return None - # we can pass jwt_unverified_json['iss'] because the previous 'if' - # statement returns if jwt_unverified_json['iss'] is missing. - if not self._verify_token(token, jwt_unverified_json['iss']): - return False + issuer = jwt_unverified_json['iss'] + # we can pass issuer because the previous 'if' statement + # returns if jwt_unverified_json['iss'] is missing. + issuer = jwt_unverified_json['iss'] + if not self._verify_token(token, issuer): + return None + + # check the token has not been revoked + verifed_token_id = self._check_token_not_revoked(token, issuer) + # if the IAM disagrees with the token sub, reject ir + if verifed_token_id != jwt_unverified_json['sub']: + return None # if the execution gets here, we can cache the token # cache is a key: value like structure with an optional timeout @@ -52,7 +66,40 @@ def is_token_valid(self, token): # only valids tokens are cached cache.set(jwt_unverified_json['sub'], token, 300) - return True + return jwt_unverified_json['sub'] + + def _check_token_not_revoked(self, token, issuer): + """Contact issuer to check if the token has been revoked or not.""" + if "https://" not in issuer: + self.logger.info('Issuer not https! 
Ending revokation check!') + return None + + try: + auth_request = urllib2.Request('%s/introspect' % issuer, + data='token=%s' % token) + + server_id = settings.SERVER_IAM_ID + server_secret = settings.SERVER_IAM_SECRET + + encode_string = '%s:%s' % (server_id, server_secret) + + base64string = base64.encodestring(encode_string).replace('\n', '') + + auth_request.add_header("Authorization", "Basic %s" % base64string) + auth_result = urllib2.urlopen(auth_request) + + auth_json = json.loads(auth_result.read()) + client_id = auth_json['client_id'] + + except (urllib2.HTTPError, + urllib2.URLError, + httplib.HTTPException, + KeyError) as error: + self.logger.error("%s: %s", type(error), str(error)) + return None + + self.logger.info("Token identifed as belonging to %s", client_id) + return client_id def _verify_token(self, token, issuer): """Fetch IAM public key and veifry token against it.""" @@ -113,11 +160,11 @@ def _is_token_issuer_trusted(self, token_json): hostname = issuer.replace("https://", "").replace("/", "") if hostname in settings.IAM_HOSTNAME_LIST: - self.logger.info("Token 'iss' from approved IAM") + self.logger.info("Token 'iss' is an approved IAM") self.logger.debug(token_json) return True else: - self.logger.info("Token 'iss' not from approved IAM") + self.logger.info("Token 'iss' is not an approved IAM") self.logger.debug(token_json) return False diff --git a/api/views/CloudRecordSummaryView.py b/api/views/CloudRecordSummaryView.py index 06178ea..d192c66 100644 --- a/api/views/CloudRecordSummaryView.py +++ b/api/views/CloudRecordSummaryView.py @@ -15,6 +15,8 @@ from rest_framework.response import Response from rest_framework.views import APIView +from api.utils.TokenChecker import TokenChecker + class CloudRecordSummaryView(APIView): """ @@ -46,6 +48,8 @@ class CloudRecordSummaryView(APIView): def __init__(self): """Set up class level logging.""" self.logger = logging.getLogger(__name__) + self._token_checker = 
TokenChecker('/etc/httpd/ssl/apache.crt', + '/etc/httpd/ssl/apache.key') super(CloudRecordSummaryView, self).__init__() def get(self, request, format=None): @@ -76,7 +80,10 @@ def get(self, request, format=None): if client_token is None: return Response(status=401) - client_id = self._token_to_id(client_token) + # The token checker will introspect the token, + # i.e. check it's in-date, correctly signed etc + # and return the client_id of the token + client_id = self._token_checker.valid_token_to_id(client_token) if client_id is None: return Response(status=401) @@ -272,38 +279,10 @@ def _request_to_token(self, request): "but not of expected form.") self.logger.error(request.META['HTTP_AUTHORIZATION']) return None - self.logger.info("Authenticated: %s...", token[:15]) - self.logger.debug("Full token: %s", token) + self.logger.info("Successfully extracted Token") + self.logger.debug("Full Token: %s", token) return token - def _token_to_id(self, token): - """Convert a token to a IAM ID (external dependency).""" - try: - auth_request = urllib2.Request(settings.IAM_URL, - data='token=%s' % token) - - server_id = settings.SERVER_IAM_ID - server_secret = settings.SERVER_IAM_SECRET - - encode_string = '%s:%s' % (server_id, server_secret) - - base64string = base64.encodestring(encode_string).replace('\n', '') - - auth_request.add_header("Authorization", "Basic %s" % base64string) - auth_result = urllib2.urlopen(auth_request) - - auth_json = json.loads(auth_result.read()) - client_id = auth_json['client_id'] - except (urllib2.HTTPError, - urllib2.URLError, - httplib.HTTPException, - KeyError) as error: - self.logger.error("%s: %s", type(error), str(error)) - return None - self.logger.info("Token identifed as %s...", client_id[:15]) - self.logger.debug("Full CLIENT_ID: %s", client_id) - return client_id - def _is_client_authorized(self, client_id): """ Return true if and only if client_id can access summaries. 
@@ -314,5 +293,5 @@ def _is_client_authorized(self, client_id): self.logger.error("%s does not have permission to view summaries", client_id) return False - self.logger.info("Authorizing %s...", client_id[:15]) + self.logger.info("Authorizing user request") return True From b9eff5ab2cf88243e28c563ae46bbb2681cc142f Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 11 Jul 2017 16:53:16 +0100 Subject: [PATCH 27/87] Fix TokenChecker tests to check for None values - as the methods now return None to be consistent with helper methods in CloudRecordSummaryView --- api/tests/test_token_checker.py | 101 ++++++++++++++++++-------------- 1 file changed, 58 insertions(+), 43 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index fdd7741..d565071 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -25,7 +25,9 @@ def tearDown(self): logging.disable(logging.NOTSET) @patch.object(TokenChecker, '_get_issuer_public_key') - def test_token_cache(self, mock_get_issuer_public_key): + @patch.object(TokenChecker, '_check_token_not_revoked') + def test_token_cache(self, mock_check_token_not_revoked, + mock_get_issuer_public_key): """ Check a cached token is granted access. @@ -35,8 +37,11 @@ def test_token_cache(self, mock_get_issuer_public_key): token is valid. 
""" # Mock the external call to retrieve the IAM public key - # used in the _verify_token and is_token_valid call + # used in the _verify_token and valid_token_to_id call mock_get_issuer_public_key.return_value = PUBLIC_KEY + # Mock the external call to check the token has not been rejected + # used in the valid_token_to_id call + mock_check_token_not_revoked.return_value = CLIENT_ID payload_list = [] @@ -45,7 +50,7 @@ def test_token_cache(self, mock_get_issuer_public_key): 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000} # Add the same token twice, this is what tests the cache functionality @@ -54,12 +59,14 @@ def test_token_cache(self, mock_get_issuer_public_key): for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): - self.assertTrue( - self._token_checker.is_token_valid(token), + self.assertEqual( + self._token_checker.valid_token_to_id(token), CLIENT_ID, "Token with payload %s should not be accepted!" % payload) @patch.object(TokenChecker, '_get_issuer_public_key') - def test_token_cache_mis_matche(self, mock_get_issuer_public_key): + @patch.object(TokenChecker, '_check_token_not_revoked') + def test_token_cache_mis_match(self, mock_check_token_not_revoked, + mock_get_issuer_public_key): """ Check tokens with the same subject are handled correctly. @@ -67,17 +74,18 @@ def test_token_cache_mis_matche(self, mock_get_issuer_public_key): access, the tokens must match. 
""" # Mock the external call to retrieve the IAM public key - # used in the _verify_token and is_token_valid call + # used in the _verify_token and valid_token_to_id call mock_get_issuer_public_key.return_value = PUBLIC_KEY - - payload_list = [] + # Mock the external call to check the token has not been rejected + # used in the valid_token_to_id call + mock_check_token_not_revoked.return_value = CLIENT_ID # This payload will be valid as we will sign it with PRIVATE_KEY payload1 = { 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000} # This payload has a subject that will be in the cache, but this @@ -87,43 +95,47 @@ def test_token_cache_mis_matche(self, mock_get_issuer_public_key): 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) - 200} token1 = self._create_token(payload1, PRIVATE_KEY) token2 = self._create_token(payload2, PRIVATE_KEY) with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): - self.assertTrue( - self._token_checker.is_token_valid(token1), + self.assertEqual( + self._token_checker.valid_token_to_id(token1), CLIENT_ID, "Token with payload %s should not be accepted!" % payload1) - self.assertFalse( - self._token_checker.is_token_valid(token2), - "Token with payload %s should not be accepted!" % payload2) + self.assertEqual( + self._token_checker.valid_token_to_id(token2), None, + "Token with payload %s should not be accepted!" 
% payload2) @patch.object(TokenChecker, '_get_issuer_public_key') - def test_valid_token(self, mock_get_issuer_public_key): + @patch.object(TokenChecker, '_check_token_not_revoked') + def test_valid_token(self, mock_check_token_not_revoked, + mock_get_issuer_public_key): """Check a valid and properly signed token is accepted.""" # Mock the external call to retrieve the IAM public key - # used in the _verify_token and is_token_valid call + # used in the _verify_token and valid_token_to_id call mock_get_issuer_public_key.return_value = PUBLIC_KEY - - payload_list = [] + # Mock the external call to check the token has not been rejected + # used in the valid_token_to_id call + mock_check_token_not_revoked.return_value = CLIENT_ID # This payload will be valid as we will sign it with PRIVATE_KEY payload = { 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000} token = self._create_token(payload, PRIVATE_KEY) with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): - self.assertTrue( - self._token_checker.is_token_valid(token), + client_id = payload['sub'] + self.assertEqual( + self._token_checker.valid_token_to_id(token), client_id, "Token with payload %s should be accepted!" 
% payload) @patch.object(TokenChecker, '_get_issuer_public_key') @@ -133,13 +145,13 @@ def test_verify_token(self, mock_get_issuer_public_key): Both by: - _verify_token - - is_token_valid + - valid_token_to_id The first method checks wether the key is properly signed The second method detemines wether the token is invalid """ # Mock the external call to retrieve the IAM public key - # used in the _verify_token and is_token_valid call + # used in the _verify_token and valid_token_to_id call mock_get_issuer_public_key.return_value = PUBLIC_KEY payload_list = [] @@ -150,7 +162,7 @@ def test_verify_token(self, mock_get_issuer_public_key): 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) for payload in payload_list: @@ -160,8 +172,8 @@ def test_verify_token(self, mock_get_issuer_public_key): self._token_checker._verify_token(token, payload['iss']), "Payload %s should not be accepted!" % payload) - self.assertFalse( - self._token_checker.is_token_valid(token), + self.assertEqual( + self._token_checker.valid_token_to_id(token), None, "Token with payload %s should not be accepted!" % payload) def test_is_token_issuer_trusted(self): @@ -170,7 +182,7 @@ def test_is_token_issuer_trusted(self): Both by: - _is_token_issuer_trusted - - is_token_valid + - valid_token_to_id The first method checks wether the issuer is in settings.IAM_HOSTNAME_LIST @@ -184,7 +196,7 @@ def test_is_token_issuer_trusted(self): payload_list.append({ 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) # Add a payload with a malicious 'iss' field. 
@@ -194,7 +206,7 @@ def test_is_token_issuer_trusted(self): 'iss': 'https://malicious-iam.idc.biz/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) for payload in payload_list: @@ -205,8 +217,8 @@ def test_is_token_issuer_trusted(self): self._token_checker._is_token_issuer_trusted(payload), "Payload %s should not be accepted!" % payload) - self.assertFalse( - self._token_checker.is_token_valid(token), + self.assertEqual( + self._token_checker.valid_token_to_id(token), None, "Token with payload %s should not be accepted!" % payload) def test_is_token_json_temporally_valid(self): @@ -215,7 +227,7 @@ def test_is_token_json_temporally_valid(self): Both by: - _is_token_json_temporally_valid - - is_token_valid + - valid_token_to_id The first method checks the temporal validity of the payload The second method detemines wether the token is invalid @@ -225,7 +237,7 @@ def test_is_token_json_temporally_valid(self): # Add a payload wihtout 'iat' or 'exp' to the payload list # to test we reject these (as we are choosing to) payload_list.append({ - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '714892f5-014f-43ad-bea0-fa47579db222'}) @@ -235,14 +247,14 @@ def test_is_token_json_temporally_valid(self): 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36'}) + 'sub': CLIENT_ID}) # Add a payload without 'iat' # to test we reject these (as we are choosing to) payload_list.append({ 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) # Add a payload with an 'iat' and 'exp' in the past @@ -252,7 +264,7 @@ 
def test_is_token_json_temporally_valid(self): 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) - 200000}) # Add a payload with an 'iat' and 'exp' in the future @@ -262,7 +274,7 @@ def test_is_token_json_temporally_valid(self): 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) + 200000, - 'sub': 'ac2f23e0-8103-4581-8014-e0e82c486e36', + 'sub': CLIENT_ID, 'exp': int(time.time()) + 2000000}) for payload in payload_list: @@ -273,17 +285,20 @@ def test_is_token_json_temporally_valid(self): self._token_checker._is_token_json_temporally_valid(payload), "Payload %s should not be accepted!" % payload) - # Assert the wrapper method is_token_valid reutrns - # False when passed temporally invalid tokens + # Assert the wrapper method valid_token_to_id reutrns + # None when passed temporally invalid tokens token = self._create_token(payload, PRIVATE_KEY) - self.assertFalse( - self._token_checker.is_token_valid(token), + self.assertEqual( + self._token_checker.valid_token_to_id(token), None, "Token with payload %s should not be accepted!" 
% payload) def _create_token(self, payload, key): """Return a token, signed by key, correspond to the payload.""" return jwt.encode(payload, key, algorithm='RS256') +# Use this Client ID in tokens +CLIENT_ID = 'ac2f23e0-8103-4581-8014-e0e82c486e36' + # Used to sign tokens PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAqxAx1H7MabcEYhis3SJoaA3tq6wUgzKzv4c16nAW4yT21P8O From 1cf5bf1a646ba3a7a518df2f19c3fe802aec804b Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 12 Jul 2017 09:38:46 +0100 Subject: [PATCH 28/87] Improve logging around token handling --- api/utils/TokenChecker.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 69fc53a..427868c 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -29,10 +29,11 @@ def valid_token_to_id(self, token): jwt_unverified_json = jwt.get_unverified_claims(token) unverified_token_id = jwt_unverified_json['sub'] + self.logger.info('Token claims to be from %s' % unverified_token_id) + # if token is in the cache, we say it is valid if cache.get(unverified_token_id) == token: - self.logger.info("Token for user %s is in cache." 
% - unverified_token_id) + self.logger.info("Token is in cache.") return jwt_unverified_json['sub'] @@ -56,6 +57,7 @@ def valid_token_to_id(self, token): if verifed_token_id != jwt_unverified_json['sub']: return None + self.logger.info('Token validated') # if the execution gets here, we can cache the token # cache is a key: value like structure with an optional timeout # as we only need the token stored we use that as the key @@ -98,7 +100,6 @@ def _check_token_not_revoked(self, token, issuer): self.logger.error("%s: %s", type(error), str(error)) return None - self.logger.info("Token identifed as belonging to %s", client_id) return client_id def _verify_token(self, token, issuer): @@ -160,7 +161,7 @@ def _is_token_issuer_trusted(self, token_json): hostname = issuer.replace("https://", "").replace("/", "") if hostname in settings.IAM_HOSTNAME_LIST: - self.logger.info("Token 'iss' is an approved IAM") + self.logger.info("Token 'iss' %s is an approved IAM" % hostname) self.logger.debug(token_json) return True else: From 74fda23357df6cabcdfe67f9c7a7ef4f2ccf1b05 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 12 Jul 2017 09:55:49 +0100 Subject: [PATCH 29/87] Improve handling of malformed tokens --- api/utils/TokenChecker.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 427868c..054f3d8 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -26,7 +26,12 @@ def __init__(self, cert, key): def valid_token_to_id(self, token): """Introspect a token to determine it's origin.""" - jwt_unverified_json = jwt.get_unverified_claims(token) + try: + jwt_unverified_json = jwt.get_unverified_claims(token) + except JWTError: + self.logger.error('Token cannot be decoded.') + self.logger.debug('Full token: %s' %token) + return None unverified_token_id = jwt_unverified_json['sub'] self.logger.info('Token claims to be from %s' % unverified_token_id) From 
a9c60759d70ea163a72a313920c01e74451a7da7 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 10:36:33 +0100 Subject: [PATCH 30/87] Rework Token Checker to use urllib2 - so we only have to use one method. urrlib2 chosen over httplib as it is the higher level library --- api/utils/TokenChecker.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 054f3d8..dca9f9f 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -4,7 +4,6 @@ import httplib import json import logging -import socket import urllib2 from django.conf import settings @@ -133,20 +132,20 @@ def _verify_token(self, token, issuer): return True - def _get_issuer_public_key(self, hostname): + def _get_issuer_public_key(self, issuer): """Return the public key of an IAM Hostname.""" try: - conn = httplib.HTTPSConnection(hostname, - cert_file=self._cert, - key_file=self._key) + key_request = urllib2.Request('%s/jwk' % issuer) + key_result = urllib2.urlopen(key_request) - conn.request('GET', '/jwk', {}, {}) - return conn.getresponse().read() - - except socket.gaierror as e: - slef.logger.info('socket.gaierror: %s, %s', - e.errno, e.strerror) + key_json = json.loads(key_result.read()) + return key_json + except (urllib2.HTTPError, + urllib2.URLError, + httplib.HTTPException, + KeyError) as error: + self.logger.error("%s: %s", type(error), str(error)) return None def _is_token_issuer_trusted(self, token_json): From 13336bb385cd2eb77beaf2f08c695c5e5a2d5ffd Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 10:37:22 +0100 Subject: [PATCH 31/87] Remove primitive cache - as we are using djangos cache --- api/utils/TokenChecker.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index dca9f9f..aabd5b6 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -15,13 +15,9 @@ class 
TokenChecker: """This class contains methods to check a JWT token for validity.""" - def __init__(self, cert, key): + def __init__(self): """Initialize a new TokenChecker.""" self.logger = logging.getLogger(__name__) - # cache is used to maintain an dicitonary of issuer/cache cut off pairs - self.cache = {} - self._cert = cert - self._key = key def valid_token_to_id(self, token): """Introspect a token to determine it's origin.""" From a1b39cf12762573fd8d8194d140a9a46c4c8874c Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 10:38:14 +0100 Subject: [PATCH 32/87] Improve logging of tokens further --- api/utils/TokenChecker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index aabd5b6..92035ce 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -25,11 +25,11 @@ def valid_token_to_id(self, token): jwt_unverified_json = jwt.get_unverified_claims(token) except JWTError: self.logger.error('Token cannot be decoded.') - self.logger.debug('Full token: %s' %token) + self.logger.debug('Full token: %s', token) return None unverified_token_id = jwt_unverified_json['sub'] - self.logger.info('Token claims to be from %s' % unverified_token_id) + self.logger.info('Token claims to be from %s', unverified_token_id) # if token is in the cache, we say it is valid if cache.get(unverified_token_id) == token: @@ -161,7 +161,7 @@ def _is_token_issuer_trusted(self, token_json): hostname = issuer.replace("https://", "").replace("/", "") if hostname in settings.IAM_HOSTNAME_LIST: - self.logger.info("Token 'iss' %s is an approved IAM" % hostname) + self.logger.info("Token 'iss' %s is an approved IAM", hostname) self.logger.debug(token_json) return True else: From 1432f5482ef4ba9356e73aea3ad88cb2c94a4b37 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 10:39:21 +0100 Subject: [PATCH 33/87] Pass issuer to get_issuer_public_method - so we can agian check the 
issuer is https before connecting --- api/utils/TokenChecker.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index 92035ce..da9ebe1 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -108,11 +108,8 @@ def _verify_token(self, token, issuer): self.logger.info('Issuer not https! Will not verify token!') return False - # extract the IAM hostname from the issuer - hostname = issuer.replace("https://", "").replace("/", "") - # get the IAM's public key - key_json = self._get_issuer_public_key(hostname) + key_json = self._get_issuer_public_key(issuer) # if we couldn't get the IAM public key, we cannot verify the token. if key_json is None: From a94aa1ca9c0981b6688a76219fbd55ceb14d7fac Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 12 Jul 2017 10:13:07 +0100 Subject: [PATCH 34/87] Remove unneeded import and variables --- api/tests/test_token_checker.py | 3 +-- api/views/CloudRecordSummaryView.py | 7 +------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index d565071..5b7699b 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -1,7 +1,6 @@ """This module tests the JSON Web Token validation.""" import logging -import unittest import time from jose import jwt @@ -17,7 +16,7 @@ class TokenCheckerTest(TestCase): def setUp(self): """Create a new TokenChecker and disable logging.""" - self._token_checker = TokenChecker(None, None) + self._token_checker = TokenChecker() logging.disable(logging.CRITICAL) def tearDown(self): diff --git a/api/views/CloudRecordSummaryView.py b/api/views/CloudRecordSummaryView.py index d192c66..22f0826 100644 --- a/api/views/CloudRecordSummaryView.py +++ b/api/views/CloudRecordSummaryView.py @@ -1,13 +1,9 @@ """This file contains the CloudRecordSummaryView class.""" -import base64 import ConfigParser import datetime -import httplib 
-import json import logging import MySQLdb -import urllib2 from rest_framework.pagination import PaginationSerializer from django.conf import settings @@ -48,8 +44,7 @@ class CloudRecordSummaryView(APIView): def __init__(self): """Set up class level logging.""" self.logger = logging.getLogger(__name__) - self._token_checker = TokenChecker('/etc/httpd/ssl/apache.crt', - '/etc/httpd/ssl/apache.key') + self._token_checker = TokenChecker() super(CloudRecordSummaryView, self).__init__() def get(self, request, format=None): From 9742af2eb0f74d51c3c5527823f76cee1592b551 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 31 Jul 2017 07:36:26 +0100 Subject: [PATCH 35/87] Remove license and version from __init__.py - in the next release, only api/__init__.py and apel_rest/__init__.py will be populated with licenses and versions - can't make it empty because of pep257 --- api/utils/__init__.py | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/api/utils/__init__.py b/api/utils/__init__.py index 8e9fd7c..975fd9b 100644 --- a/api/utils/__init__.py +++ b/api/utils/__init__.py @@ -1,18 +1 @@ -""" -Copyright (C) 2016 STFC. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -@author Greg Corbett -""" -__version__ = '1, 3, 0' +"""This file allows utils to be imported as a python package.""" From 0fac65142707c5dcbe992014afb3df21c6604654 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 31 Jul 2017 08:13:52 +0100 Subject: [PATCH 36/87] Mock away TokenChecker in summary view tests - have to use patch now because mock is persistant between the individual unit tests whereas patch only effects the individual unit tests --- api/tests/test_cloud_record_summary_get.py | 24 +++++++++++++--------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index cc79ea7..8341afd 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -4,9 +4,10 @@ import MySQLdb from api.views.CloudRecordSummaryView import CloudRecordSummaryView +from api.utils.TokenChecker import TokenChecker from django.core.urlresolvers import reverse from django.test import Client, TestCase -from mock import Mock +from mock import Mock, patch QPATH_TEST = '/tmp/django-test/' @@ -18,7 +19,8 @@ def setUp(self): """Prevent logging from appearing in test output.""" logging.disable(logging.CRITICAL) - def test_cloud_record_summary_get_IAM_fail(self): + @patch.object(TokenChecker, 'valid_token_to_id') + def test_cloud_record_summary_get_IAM_fail(self, mock_valid_token_to_id): """ Test what happens if we fail to contact the IAM. 
@@ -29,7 +31,7 @@ def test_cloud_record_summary_get_IAM_fail(self): # Mock the functionality of the IAM # Used in the underlying GET method # Simulates a failure to translate a token to an ID - CloudRecordSummaryView._token_to_id = Mock(return_value=None) + mock_valid_token_to_id.return_value = None with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request @@ -38,24 +40,25 @@ def test_cloud_record_summary_get_IAM_fail(self): "&from=20000101&to=20191231"), authZ_header_cont="Bearer TestToken") - def test_cloud_record_summary_get_400(self): + @patch.object(TokenChecker, 'valid_token_to_id') + def test_cloud_record_summary_get_400(self, mock_valid_token_to_id): """Test a GET request without the from field.""" # Mock the functionality of the IAM # Simulates the translation of a token to an ID # Used in the underlying GET method - CloudRecordSummaryView._token_to_id = Mock(return_value="TestService") + mock_valid_token_to_id.return_value = 'TestService' with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request self._check_summary_get(400, options="?group=TestGroup", authZ_header_cont="Bearer TestToken") - def test_cloud_record_summary_get_403(self): - """Test an unauthorized GET request.""" + @patch.object(TokenChecker, 'valid_token_to_id') + def test_cloud_record_summary_get_403(self, mock_valid_token_to_id): # Mock the functionality of the IAM # Simulates the translation of a token to an unauthorized ID # Used in the underlying GET method - CloudRecordSummaryView._token_to_id = Mock(return_value="FakeService") + mock_valid_token_to_id.return_value = 'FakeService' with self.settings(ALLOWED_FOR_GET='TestService'): # Make (and check) the GET request @@ -79,7 +82,8 @@ def test_cloud_record_summary_get_401(self): "&from=20000101&to=20191231"), authZ_header_cont="TestToken") - def test_cloud_record_summary_get_200(self): + @patch.object(TokenChecker, 'valid_token_to_id') + def test_cloud_record_summary_get_200(self, 
mock_valid_token_to_id): """Test a successful GET request.""" # Connect to database database = self._connect_to_database() @@ -89,7 +93,7 @@ def test_cloud_record_summary_get_200(self): self._populate_database(database) # Mock the functionality of the IAM - CloudRecordSummaryView._token_to_id = Mock(return_value="TestService") + mock_valid_token_to_id.return_value = 'TestService' expected_response = ('{' '"count":2,' From 49100be7d71dfb587367f410e66e3c36f8932187 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Mon, 31 Jul 2017 13:01:24 +0100 Subject: [PATCH 37/87] Add additional tests to catch some edge cases --- api/tests/test_token_checker.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index 5b7699b..d2d9e6d 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -291,6 +291,23 @@ def test_is_token_json_temporally_valid(self): self._token_checker.valid_token_to_id(token), None, "Token with payload %s should not be accepted!" 
% payload) + def test_garbage_token(self): + """Test a garbage token is rejected.""" + token = 'ffnnsdifsdjofjfosdjfodsjfosdjofj' + result = self._token_checker.valid_token_to_id(token) + self.assertEqual(result, None) + + def test_http_issuer_ban(self): + """Test a a HTTP issuer is rejected.""" + self.assertEqual( + self._token_checker._check_token_not_revoked(None, + 'http://idc.org'), + None) + + self.assertFalse( + self._token_checker._verify_token(None, + 'http://idc.org')) + def _create_token(self, payload, key): """Return a token, signed by key, correspond to the payload.""" return jwt.encode(payload, key, algorithm='RS256') From fbf41dfe27003bf53d5d0cb5e1a10c6c90a3ab71 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 13:11:28 +0100 Subject: [PATCH 38/87] Generate standard token via a function - reduces code repetiton - can't be a class variable because the time needs to be updated everytime it is accessed. --- api/tests/test_token_checker.py | 37 ++++++++++++--------------------- 1 file changed, 13 insertions(+), 24 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index d2d9e6d..67e8e0b 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -45,12 +45,7 @@ def test_token_cache(self, mock_check_token_not_revoked, payload_list = [] # This payload will be valid as we will sign it with PRIVATE_KEY - payload = { - 'iss': 'https://iam-test.idc.eu/', - 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', - 'iat': int(time.time()) - 2000000, - 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000} + payload = self._standard_token() # Add the same token twice, this is what tests the cache functionality payload_list = [payload, payload] @@ -80,12 +75,7 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, mock_check_token_not_revoked.return_value = CLIENT_ID # This payload will be valid as we will sign it with PRIVATE_KEY - payload1 = { - 'iss': 
'https://iam-test.idc.eu/', - 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', - 'iat': int(time.time()) - 2000000, - 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000} + payload1 = self._standard_token() # This payload has a subject that will be in the cache, but this # new token is not. We need to ensure this invalid token does not @@ -122,12 +112,7 @@ def test_valid_token(self, mock_check_token_not_revoked, mock_check_token_not_revoked.return_value = CLIENT_ID # This payload will be valid as we will sign it with PRIVATE_KEY - payload = { - 'iss': 'https://iam-test.idc.eu/', - 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', - 'iat': int(time.time()) - 2000000, - 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000} + payload = self._standard_token() token = self._create_token(payload, PRIVATE_KEY) @@ -157,12 +142,7 @@ def test_verify_token(self, mock_get_issuer_public_key): # This payload would be valid if properly signed, but we are going to # sign it with FORGED_PRIVATE_KEY which will not match the PUBLIC_KEY - payload_list.append({ - 'iss': 'https://iam-test.idc.eu/', - 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', - 'iat': int(time.time()) - 2000000, - 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000}) + payload_list.append(self._standard_token()) for payload in payload_list: token = self._create_token(payload, FORGED_PRIVATE_KEY) @@ -312,6 +292,15 @@ def _create_token(self, payload, key): """Return a token, signed by key, correspond to the payload.""" return jwt.encode(payload, key, algorithm='RS256') + def _standard_token(self): + return { + 'iss': 'https://iam-test.idc.eu/', + 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', + 'iat': int(time.time()) - 2000000, + 'sub': CLIENT_ID, + 'exp': int(time.time()) + 200000} + + # Use this Client ID in tokens CLIENT_ID = 'ac2f23e0-8103-4581-8014-e0e82c486e36' From cea9c1564c7517d28e7cf0fc25066d16be358680 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 1 Aug 2017 13:17:17 +0100 Subject: [PATCH 39/87] Add 
pep8 and pep257 fixes to the _standard_token method --- api/tests/test_token_checker.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index 67e8e0b..c1871f2 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -293,7 +293,8 @@ def _create_token(self, payload, key): return jwt.encode(payload, key, algorithm='RS256') def _standard_token(self): - return { + """Return a token that will ve valid if properly signed.""" + return { 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, From 83664723ab393b755ff25b3f14122fb2fdc6b50c Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 09:28:04 +0100 Subject: [PATCH 40/87] Remove unneeded imports - these imports were previously used to mock a method away in that class, but now that functionality is in the TokenChecker class and is mocked using patch (rather than mock) --- api/tests/test_cloud_record_summary_get.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index 8341afd..6c8d294 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -3,11 +3,10 @@ import logging import MySQLdb -from api.views.CloudRecordSummaryView import CloudRecordSummaryView from api.utils.TokenChecker import TokenChecker from django.core.urlresolvers import reverse from django.test import Client, TestCase -from mock import Mock, patch +from mock import patch QPATH_TEST = '/tmp/django-test/' From 9f8268493ae8373e184e315a3650fd8f4c2e8d76 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 09:34:51 +0100 Subject: [PATCH 41/87] Make TokenChecker inherit object --- api/utils/TokenChecker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/utils/TokenChecker.py 
b/api/utils/TokenChecker.py index da9ebe1..d4525e0 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -12,7 +12,7 @@ from jose.exceptions import ExpiredSignatureError, JWTClaimsError, JWTError -class TokenChecker: +class TokenChecker(object): """This class contains methods to check a JWT token for validity.""" def __init__(self): From ea10240187dc20e3c2e1c666a8410ecfa77c09f4 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:05:40 +0100 Subject: [PATCH 42/87] Remove misleading comment as it is no longer true --- api/tests/test_token_checker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index c1871f2..2a77778 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -10,7 +10,6 @@ from api.utils.TokenChecker import TokenChecker -# Using unittest and not django.test as no need for overhead of database class TokenCheckerTest(TestCase): """Tests the JSON Web Token validation.""" From 63ba4383df5ba97dfe655d348283f148328abb80 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:15:01 +0100 Subject: [PATCH 43/87] Improve comments following review --- apel_rest/settings.py | 2 +- api/tests/test_token_checker.py | 44 +++++++++++++++------------------ api/utils/TokenChecker.py | 12 ++++----- 3 files changed, 27 insertions(+), 31 deletions(-) diff --git a/apel_rest/settings.py b/apel_rest/settings.py index 9a3f0b0..358ec58 100644 --- a/apel_rest/settings.py +++ b/apel_rest/settings.py @@ -163,7 +163,7 @@ PROVIDERS_URL = 'provider_url' -# The Hostnames of IAMs that can issue access tokens for the REST interface +# List of hostnames of IAMs that can issue access tokens for the REST interface IAM_HOSTNAME_LIST = ['allowed_iams'] SERVER_IAM_ID = 'server_iam_id' SERVER_IAM_SECRET = 'server_iam_secret' diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index 2a77778..2d6abc1 100644 --- 
a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -30,7 +30,7 @@ def test_token_cache(self, mock_check_token_not_revoked, Check a cached token is granted access. Method does this by checking a token is valid twice, the first time - the token is validate and stored in a cache, the second time access + the token is validated and stored in a cache, the second time access should be granted because the token is in the cache, not because the token is valid. """ @@ -63,7 +63,7 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, """ Check tokens with the same subject are handled correctly. - Having a token cached for the sub should not be sufficent to grant + Having a token cached for the subject should not be sufficent to grant access, the tokens must match. """ # Mock the external call to retrieve the IAM public key @@ -78,7 +78,7 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, # This payload has a subject that will be in the cache, but this # new token is not. We need to ensure this invalid token does not - # get granted rights based only on it's sub being in the cache + # get granted rights based only on it's subject being in the cache payload2 = { 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', @@ -168,18 +168,16 @@ def test_is_token_issuer_trusted(self): """ payload_list = [] - # Add a payload without 'iss' field. - # to test we reject these as we cannot - # tell where it came from (so can't verify it) + # Test we reject a payload without 'iss' field + # as we cannot tell where it came from (so can't verify it) payload_list.append({ 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) - # Add a payload with a malicious 'iss' field. 
- # to test we reject these as we do not wantt - # to attempt to verify it + # Test we reject a payload with a malicious 'iss' field + # as we do not want to attempt to verify it payload_list.append({ 'iss': 'https://malicious-iam.idc.biz/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', @@ -212,32 +210,31 @@ def test_is_token_json_temporally_valid(self): """ payload_list = [] - # Add a payload wihtout 'iat' or 'exp' to the payload list - # to test we reject these (as we are choosing to) + # Test that we reject a payload without 'iat' or 'exp' + # as the tokens should have a lifetime payload_list.append({ 'sub': CLIENT_ID, 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '714892f5-014f-43ad-bea0-fa47579db222'}) - # Add a payload without 'exp' to the payload_list - # to test we reject these (as we are choosing to) + # Test that we reject a payload without 'exp' + # as such a token would never expire payload_list.append({ 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID}) - # Add a payload without 'iat' - # to test we reject these (as we are choosing to) + # Test that we reject a payload without 'iat' + # as all tokens should indicate when they were issued payload_list.append({ 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'sub': CLIENT_ID, 'exp': int(time.time()) + 200000}) - # Add a payload with an 'iat' and 'exp' in the past - # (e.g. they have expired) to test we are - # rejecting these + # Test that we reject a payload with an 'iat' and 'exp' + # in the past (e.g. 
they have expired) payload_list.append({ 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', @@ -245,9 +242,8 @@ def test_is_token_json_temporally_valid(self): 'sub': CLIENT_ID, 'exp': int(time.time()) - 200000}) - # Add a payload with an 'iat' and 'exp' in the future - # to test we are rejecting these (as we should as they - # are not yet valid) + # Test that we reject a payload with an 'iat' and 'exp' + # in the future (as we should as they are not yet valid) payload_list.append({ 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', @@ -263,7 +259,7 @@ def test_is_token_json_temporally_valid(self): self._token_checker._is_token_json_temporally_valid(payload), "Payload %s should not be accepted!" % payload) - # Assert the wrapper method valid_token_to_id reutrns + # Assert the wrapper method valid_token_to_id returns # None when passed temporally invalid tokens token = self._create_token(payload, PRIVATE_KEY) self.assertEqual( @@ -277,7 +273,7 @@ def test_garbage_token(self): self.assertEqual(result, None) def test_http_issuer_ban(self): - """Test a a HTTP issuer is rejected.""" + """Test a HTTP issuer is rejected.""" self.assertEqual( self._token_checker._check_token_not_revoked(None, 'http://idc.org'), @@ -292,7 +288,7 @@ def _create_token(self, payload, key): return jwt.encode(payload, key, algorithm='RS256') def _standard_token(self): - """Return a token that will ve valid if properly signed.""" + """Return a token that will be valid if properly signed.""" return { 'iss': 'https://iam-test.idc.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', diff --git a/api/utils/TokenChecker.py b/api/utils/TokenChecker.py index d4525e0..f2cff96 100644 --- a/api/utils/TokenChecker.py +++ b/api/utils/TokenChecker.py @@ -20,7 +20,7 @@ def __init__(self): self.logger = logging.getLogger(__name__) def valid_token_to_id(self, token): - """Introspect a token to determine it's origin.""" + 
"""Introspect a token to determine its origin.""" try: jwt_unverified_json = jwt.get_unverified_claims(token) except JWTError: @@ -46,7 +46,7 @@ def valid_token_to_id(self, token): issuer = jwt_unverified_json['iss'] # we can pass issuer because the previous 'if' statement - # returns if jwt_unverified_json['iss'] is missing. + # returns if jwt_unverified_json['iss'] is missing. issuer = jwt_unverified_json['iss'] if not self._verify_token(token, issuer): return None @@ -58,10 +58,10 @@ def valid_token_to_id(self, token): return None self.logger.info('Token validated') - # if the execution gets here, we can cache the token - # cache is a key: value like structure with an optional timeout - # as we only need the token stored we use that as the key - # and None as the value. + # If the execution gets here, we can cache the token. + # The cache variable is a key: value like structure + # with an optional timeout, we use the token subject + # as the key and the token as the value. # Cache timeout set to 300 seconds to enable quick revocation # but also limit the number of requests to the IAM instance # Caching is also done after token validation to ensure From 637855736c0a17dd7297308bc4da283cc5495d27 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:27:24 +0100 Subject: [PATCH 44/87] Improve bracket indentation as per review --- api/tests/test_token_checker.py | 63 ++++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index 2d6abc1..f864197 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -54,7 +54,8 @@ def test_token_cache(self, mock_check_token_not_revoked, with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertEqual( self._token_checker.valid_token_to_id(token), CLIENT_ID, - "Token with payload %s should not be accepted!" % payload) + "Token with payload %s should not be accepted!" 
% payload + ) @patch.object(TokenChecker, '_get_issuer_public_key') @patch.object(TokenChecker, '_check_token_not_revoked') @@ -84,7 +85,8 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, - 'exp': int(time.time()) - 200} + 'exp': int(time.time()) - 200 + } token1 = self._create_token(payload1, PRIVATE_KEY) token2 = self._create_token(payload2, PRIVATE_KEY) @@ -92,11 +94,13 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertEqual( self._token_checker.valid_token_to_id(token1), CLIENT_ID, - "Token with payload %s should not be accepted!" % payload1) + "Token with payload %s should not be accepted!" % payload1 + ) self.assertEqual( self._token_checker.valid_token_to_id(token2), None, - "Token with payload %s should not be accepted!" % payload2) + "Token with payload %s should not be accepted!" % payload2 + ) @patch.object(TokenChecker, '_get_issuer_public_key') @patch.object(TokenChecker, '_check_token_not_revoked') @@ -119,7 +123,8 @@ def test_valid_token(self, mock_check_token_not_revoked, client_id = payload['sub'] self.assertEqual( self._token_checker.valid_token_to_id(token), client_id, - "Token with payload %s should be accepted!" % payload) + "Token with payload %s should be accepted!" % payload + ) @patch.object(TokenChecker, '_get_issuer_public_key') def test_verify_token(self, mock_get_issuer_public_key): @@ -148,11 +153,13 @@ def test_verify_token(self, mock_get_issuer_public_key): with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertFalse( self._token_checker._verify_token(token, payload['iss']), - "Payload %s should not be accepted!" % payload) + "Payload %s should not be accepted!" % payload + ) self.assertEqual( self._token_checker.valid_token_to_id(token), None, - "Token with payload %s should not be accepted!" 
% payload) + "Token with payload %s should not be accepted!" % payload + ) def test_is_token_issuer_trusted(self): """ @@ -174,7 +181,8 @@ def test_is_token_issuer_trusted(self): 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000}) + 'exp': int(time.time()) + 200000 + }) # Test we reject a payload with a malicious 'iss' field # as we do not want to attempt to verify it @@ -183,7 +191,8 @@ def test_is_token_issuer_trusted(self): 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000}) + 'exp': int(time.time()) + 200000 + }) for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) @@ -191,11 +200,13 @@ def test_is_token_issuer_trusted(self): with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): self.assertFalse( self._token_checker._is_token_issuer_trusted(payload), - "Payload %s should not be accepted!" % payload) + "Payload %s should not be accepted!" % payload + ) self.assertEqual( self._token_checker.valid_token_to_id(token), None, - "Token with payload %s should not be accepted!" % payload) + "Token with payload %s should not be accepted!" 
% payload + ) def test_is_token_json_temporally_valid(self): """ @@ -215,7 +226,8 @@ def test_is_token_json_temporally_valid(self): payload_list.append({ 'sub': CLIENT_ID, 'iss': 'https://iam-test.indigo-datacloud.eu/', - 'jti': '714892f5-014f-43ad-bea0-fa47579db222'}) + 'jti': '714892f5-014f-43ad-bea0-fa47579db222' + }) # Test that we reject a payload without 'exp' # as such a token would never expire @@ -223,7 +235,8 @@ def test_is_token_json_temporally_valid(self): 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, - 'sub': CLIENT_ID}) + 'sub': CLIENT_ID + }) # Test that we reject a payload without 'iat' # as all tokens should indicate when they were issued @@ -231,7 +244,8 @@ def test_is_token_json_temporally_valid(self): 'iss': 'https://iam-test.indigo-datacloud.eu/', 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000}) + 'exp': int(time.time()) + 200000 + }) # Test that we reject a payload with an 'iat' and 'exp' # in the past (e.g. 
they have expired) @@ -240,7 +254,8 @@ def test_is_token_json_temporally_valid(self): 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, - 'exp': int(time.time()) - 200000}) + 'exp': int(time.time()) - 200000 + }) # Test that we reject a payload with an 'iat' and 'exp' # in the future (as we should as they are not yet valid) @@ -249,7 +264,8 @@ def test_is_token_json_temporally_valid(self): 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) + 200000, 'sub': CLIENT_ID, - 'exp': int(time.time()) + 2000000}) + 'exp': int(time.time()) + 2000000 + }) for payload in payload_list: # Assert the underlying helper method reponsible for @@ -257,14 +273,16 @@ def test_is_token_json_temporally_valid(self): # temporally invalid payloads self.assertFalse( self._token_checker._is_token_json_temporally_valid(payload), - "Payload %s should not be accepted!" % payload) + "Payload %s should not be accepted!" % payload + ) # Assert the wrapper method valid_token_to_id returns # None when passed temporally invalid tokens token = self._create_token(payload, PRIVATE_KEY) self.assertEqual( self._token_checker.valid_token_to_id(token), None, - "Token with payload %s should not be accepted!" % payload) + "Token with payload %s should not be accepted!" 
% payload + ) def test_garbage_token(self): """Test a garbage token is rejected.""" @@ -277,11 +295,13 @@ def test_http_issuer_ban(self): self.assertEqual( self._token_checker._check_token_not_revoked(None, 'http://idc.org'), - None) + None + ) self.assertFalse( self._token_checker._verify_token(None, - 'http://idc.org')) + 'http://idc.org') + ) def _create_token(self, payload, key): """Return a token, signed by key, correspond to the payload.""" @@ -294,7 +314,8 @@ def _standard_token(self): 'jti': '098cb343-c45e-490d-8aa0-ce1873cdc5f8', 'iat': int(time.time()) - 2000000, 'sub': CLIENT_ID, - 'exp': int(time.time()) + 200000} + 'exp': int(time.time()) + 200000 + } # Use this Client ID in tokens From b907f10938390e00e199c604f3058197ce8ca0c3 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:29:01 +0100 Subject: [PATCH 45/87] Use actual list for IAM_HOSTNAME_LIST --- api/tests/test_token_checker.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/api/tests/test_token_checker.py b/api/tests/test_token_checker.py index f864197..ef7f057 100644 --- a/api/tests/test_token_checker.py +++ b/api/tests/test_token_checker.py @@ -51,7 +51,7 @@ def test_token_cache(self, mock_check_token_not_revoked, for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): + with self.settings(IAM_HOSTNAME_LIST=['iam-test.idc.eu']): self.assertEqual( self._token_checker.valid_token_to_id(token), CLIENT_ID, "Token with payload %s should not be accepted!" 
% payload @@ -91,7 +91,7 @@ def test_token_cache_mis_match(self, mock_check_token_not_revoked, token1 = self._create_token(payload1, PRIVATE_KEY) token2 = self._create_token(payload2, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): + with self.settings(IAM_HOSTNAME_LIST=['iam-test.idc.eu']): self.assertEqual( self._token_checker.valid_token_to_id(token1), CLIENT_ID, "Token with payload %s should not be accepted!" % payload1 @@ -119,7 +119,7 @@ def test_valid_token(self, mock_check_token_not_revoked, token = self._create_token(payload, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): + with self.settings(IAM_HOSTNAME_LIST=['iam-test.idc.eu']): client_id = payload['sub'] self.assertEqual( self._token_checker.valid_token_to_id(token), client_id, @@ -150,7 +150,7 @@ def test_verify_token(self, mock_get_issuer_public_key): for payload in payload_list: token = self._create_token(payload, FORGED_PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): + with self.settings(IAM_HOSTNAME_LIST=['iam-test.idc.eu']): self.assertFalse( self._token_checker._verify_token(token, payload['iss']), "Payload %s should not be accepted!" % payload @@ -197,7 +197,7 @@ def test_is_token_issuer_trusted(self): for payload in payload_list: token = self._create_token(payload, PRIVATE_KEY) - with self.settings(IAM_HOSTNAME_LIST='iam-test.idc.eu'): + with self.settings(IAM_HOSTNAME_LIST=['iam-test.idc.eu']): self.assertFalse( self._token_checker._is_token_issuer_trusted(payload), "Payload %s should not be accepted!" 
% payload From 6a7f0f65262d0295b9cff2774f6ff85d35649761 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 10:38:46 +0100 Subject: [PATCH 46/87] Add missing docstring --- api/tests/test_cloud_record_summary_get.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/tests/test_cloud_record_summary_get.py b/api/tests/test_cloud_record_summary_get.py index 6c8d294..7ca1414 100644 --- a/api/tests/test_cloud_record_summary_get.py +++ b/api/tests/test_cloud_record_summary_get.py @@ -54,6 +54,7 @@ def test_cloud_record_summary_get_400(self, mock_valid_token_to_id): @patch.object(TokenChecker, 'valid_token_to_id') def test_cloud_record_summary_get_403(self, mock_valid_token_to_id): + """Test an unauthorized service cannot make a GET request.""" # Mock the functionality of the IAM # Simulates the translation of a token to an unauthorized ID # Used in the underlying GET method From d783a9c215ce6555b6f498556e2fcd2feab9a4f2 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Wed, 6 Sep 2017 11:46:40 +0100 Subject: [PATCH 47/87] Update docker/README.md to refer to main README.md --- docker/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/README.md b/docker/README.md index 5e26a4f..8d4fe5c 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,4 +1,4 @@ # Using the Docker containers -Run the `run_container.sh` script. +This directory contains files necessary for Docker based deployment. -This will get the Docker image for the APEL REST API (including the server) and configure the persistant database container and launch an instance of the APEL Rest interface. +To find out how to run the 'APEL for Indigo DataCloud' containers, please refer to these [instructions](../README.md#running-the-docker-image-on-centos-7-and-ubuntu-1604). 
From c8d53faba7206ddfd726cd4eff3de60c7978b31d Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Fri, 8 Sep 2017 16:10:44 +0100 Subject: [PATCH 48/87] Include the word 'these' in the hyperlink --- docker/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index 8d4fe5c..3826f5c 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,4 +1,4 @@ # Using the Docker containers This directory contains files necessary for Docker based deployment. -To find out how to run the 'APEL for Indigo DataCloud' containers, please refer to these [instructions](../README.md#running-the-docker-image-on-centos-7-and-ubuntu-1604). +To find out how to run the 'APEL for Indigo DataCloud' containers, please refer to [these instructions](../README.md#running-the-docker-image-on-centos-7-and-ubuntu-1604). From ab0432dd3e8f13c45887a3d5a1ca5ad1e1fa7721 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 6 Sep 2017 16:25:51 +0100 Subject: [PATCH 49/87] Update Partition range for new deployments - and add a note explaining how existing deployments could be repartitioned if desired --- schemas/20-cloud-extra.sql | 92 +++++++++++++++++--------------------- 1 file changed, 41 insertions(+), 51 deletions(-) diff --git a/schemas/20-cloud-extra.sql b/schemas/20-cloud-extra.sql index 2c59b0f..0a47367 100644 --- a/schemas/20-cloud-extra.sql +++ b/schemas/20-cloud-extra.sql @@ -3,47 +3,21 @@ -- Partitioning for CloudRecords to aid query performance and monthly deletions +-- More partitions can be added by "reorganizing" the pDefault parition. 
E.g.: +/* +ALTER TABLE CloudRecords REORGANIZE PARTITION pDefault INTO ( + PARTITION p2017_12 VALUES LESS THAN (TO_DAYS('2018-01-01')), + PARTITION p2018_01 VALUES LESS THAN (TO_DAYS('2018-02-01')), + PARTITION p2018_02 VALUES LESS THAN (TO_DAYS('2018-03-01')), + PARTITION pDefault VALUES LESS THAN MAXVALUE +); +*/ + ALTER TABLE CloudRecords PARTITION BY RANGE (TO_DAYS(StartTime)) ( -PARTITION p2012_12 VALUES LESS THAN (TO_DAYS('2013-01-01')), -PARTITION p2013_01 VALUES LESS THAN (TO_DAYS('2013-02-01')), -PARTITION p2013_02 VALUES LESS THAN (TO_DAYS('2013-03-01')), -PARTITION p2013_03 VALUES LESS THAN (TO_DAYS('2013-04-01')), -PARTITION p2013_04 VALUES LESS THAN (TO_DAYS('2013-05-01')), -PARTITION p2013_05 VALUES LESS THAN (TO_DAYS('2013-06-01')), -PARTITION p2013_06 VALUES LESS THAN (TO_DAYS('2013-07-01')), -PARTITION p2013_07 VALUES LESS THAN (TO_DAYS('2013-08-01')), -PARTITION p2013_08 VALUES LESS THAN (TO_DAYS('2013-09-01')), -PARTITION p2013_09 VALUES LESS THAN (TO_DAYS('2013-10-01')), -PARTITION p2013_10 VALUES LESS THAN (TO_DAYS('2013-11-01')), -PARTITION p2013_11 VALUES LESS THAN (TO_DAYS('2013-12-01')), -PARTITION p2013_12 VALUES LESS THAN (TO_DAYS('2014-01-01')), -PARTITION p2014_01 VALUES LESS THAN (TO_DAYS('2014-02-01')), -PARTITION p2014_02 VALUES LESS THAN (TO_DAYS('2014-03-01')), -PARTITION p2014_03 VALUES LESS THAN (TO_DAYS('2014-04-01')), -PARTITION p2014_04 VALUES LESS THAN (TO_DAYS('2014-05-01')), -PARTITION p2014_05 VALUES LESS THAN (TO_DAYS('2014-06-01')), -PARTITION p2014_06 VALUES LESS THAN (TO_DAYS('2014-07-01')), -PARTITION p2014_07 VALUES LESS THAN (TO_DAYS('2014-08-01')), -PARTITION p2014_08 VALUES LESS THAN (TO_DAYS('2014-09-01')), -PARTITION p2014_09 VALUES LESS THAN (TO_DAYS('2014-10-01')), -PARTITION p2014_10 VALUES LESS THAN (TO_DAYS('2014-11-01')), -PARTITION p2014_11 VALUES LESS THAN (TO_DAYS('2014-12-01')), -PARTITION p2014_12 VALUES LESS THAN (TO_DAYS('2015-01-01')), -PARTITION p2015_01 VALUES LESS THAN (TO_DAYS('2015-02-01')), 
-PARTITION p2015_02 VALUES LESS THAN (TO_DAYS('2015-03-01')), -PARTITION p2015_03 VALUES LESS THAN (TO_DAYS('2015-04-01')), -PARTITION p2015_04 VALUES LESS THAN (TO_DAYS('2015-05-01')), -PARTITION p2015_05 VALUES LESS THAN (TO_DAYS('2015-06-01')), -PARTITION p2015_06 VALUES LESS THAN (TO_DAYS('2015-07-01')), -PARTITION p2015_07 VALUES LESS THAN (TO_DAYS('2015-08-01')), -PARTITION p2015_08 VALUES LESS THAN (TO_DAYS('2015-09-01')), -PARTITION p2015_09 VALUES LESS THAN (TO_DAYS('2015-10-01')), -PARTITION p2015_10 VALUES LESS THAN (TO_DAYS('2015-11-01')), -PARTITION p2015_11 VALUES LESS THAN (TO_DAYS('2015-12-01')), -PARTITION p2015_12 VALUES LESS THAN (TO_DAYS('2016-01-01')), -PARTITION p2016_01 VALUES LESS THAN (TO_DAYS('2016-02-01')), -PARTITION p2016_02 VALUES LESS THAN (TO_DAYS('2016-03-01')), +-- Partition pNull will contain Records with a NULL +-- (or negative) StartTime, so it will hopefully be empty +PARTITION pNull VALUES LESS THAN (0), PARTITION p2016_03 VALUES LESS THAN (TO_DAYS('2016-04-01')), PARTITION p2016_04 VALUES LESS THAN (TO_DAYS('2016-05-01')), PARTITION p2016_05 VALUES LESS THAN (TO_DAYS('2016-06-01')), @@ -53,17 +27,33 @@ PARTITION p2016_08 VALUES LESS THAN (TO_DAYS('2016-09-01')), PARTITION p2016_09 VALUES LESS THAN (TO_DAYS('2016-10-01')), PARTITION p2016_10 VALUES LESS THAN (TO_DAYS('2016-11-01')), PARTITION p2016_11 VALUES LESS THAN (TO_DAYS('2016-12-01')), -PARTITION p2017_01 VALUES LESS THAN (TO_DAYS('2017-01-01')), -PARTITION p2017_02 VALUES LESS THAN (TO_DAYS('2017-02-01')), -PARTITION p2017_03 VALUES LESS THAN (TO_DAYS('2017-03-01')), -PARTITION p2017_04 VALUES LESS THAN (TO_DAYS('2017-04-01')), -PARTITION p2017_05 VALUES LESS THAN (TO_DAYS('2017-05-01')), -PARTITION p2017_06 VALUES LESS THAN (TO_DAYS('2017-06-01')), -PARTITION p2017_07 VALUES LESS THAN (TO_DAYS('2017-07-01')), -PARTITION p2017_08 VALUES LESS THAN (TO_DAYS('2017-08-01')), -PARTITION p2017_09 VALUES LESS THAN (TO_DAYS('2017-09-01')), -PARTITION p2017_10 VALUES LESS THAN 
(TO_DAYS('2017-10-01')), -PARTITION p2017_11 VALUES LESS THAN (TO_DAYS('2017-11-01')), -PARTITION p2017_12 VALUES LESS THAN (TO_DAYS('2017-12-01')), +PARTITION p2016_12 VALUES LESS THAN (TO_DAYS('2017-01-01')), +PARTITION p2017_01 VALUES LESS THAN (TO_DAYS('2017-02-01')), +PARTITION p2017_02 VALUES LESS THAN (TO_DAYS('2017-03-01')), +PARTITION p2017_03 VALUES LESS THAN (TO_DAYS('2017-04-01')), +PARTITION p2017_04 VALUES LESS THAN (TO_DAYS('2017-05-01')), +PARTITION p2017_05 VALUES LESS THAN (TO_DAYS('2017-06-01')), +PARTITION p2017_06 VALUES LESS THAN (TO_DAYS('2017-07-01')), +PARTITION p2017_07 VALUES LESS THAN (TO_DAYS('2017-08-01')), +PARTITION p2017_08 VALUES LESS THAN (TO_DAYS('2017-09-01')), +PARTITION p2017_09 VALUES LESS THAN (TO_DAYS('2017-10-01')), +PARTITION p2017_10 VALUES LESS THAN (TO_DAYS('2017-11-01')), +PARTITION p2017_11 VALUES LESS THAN (TO_DAYS('2017-12-01')), +PARTITION p2017_12 VALUES LESS THAN (TO_DAYS('2018-01-01')), +PARTITION p2018_01 VALUES LESS THAN (TO_DAYS('2018-02-01')), +PARTITION p2018_02 VALUES LESS THAN (TO_DAYS('2018-03-01')), +PARTITION p2018_03 VALUES LESS THAN (TO_DAYS('2018-04-01')), +PARTITION p2018_04 VALUES LESS THAN (TO_DAYS('2018-05-01')), +PARTITION p2018_05 VALUES LESS THAN (TO_DAYS('2018-06-01')), +PARTITION p2018_06 VALUES LESS THAN (TO_DAYS('2018-07-01')), +PARTITION p2018_07 VALUES LESS THAN (TO_DAYS('2018-08-01')), +PARTITION p2018_08 VALUES LESS THAN (TO_DAYS('2018-09-01')), +PARTITION p2018_09 VALUES LESS THAN (TO_DAYS('2018-10-01')), +PARTITION p2018_10 VALUES LESS THAN (TO_DAYS('2018-11-01')), +PARTITION p2018_11 VALUES LESS THAN (TO_DAYS('2018-12-01')), +PARTITION p2018_12 VALUES LESS THAN (TO_DAYS('2019-01-01')), +PARTITION p2019_01 VALUES LESS THAN (TO_DAYS('2019-02-01')), +PARTITION p2019_02 VALUES LESS THAN (TO_DAYS('2019-03-01')), +PARTITION p2019_03 VALUES LESS THAN (TO_DAYS('2019-04-01')), PARTITION pDefault VALUES LESS THAN MAXVALUE ); From a066f742f57927bc048300c44f091b54f428637c Mon Sep 17 00:00:00 
2001 From: Greg Corbett Date: Wed, 6 Sep 2017 16:27:06 +0100 Subject: [PATCH 50/87] Partition the database used by travis - doing this at least makes sure the syntax is correct --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index dd9165a..3e22b69 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,6 +28,8 @@ before_script: # create an empty mysql database - mysql -u root -e "create database apel_rest" - mysql -u root apel_rest < schemas/10-cloud.sql + # partition the database, at least to make sure the syntax is correct + - mysql -u root apel_rest < schemas/20-cloud-extra.sql - export PYTHONPATH=$PYTHONPATH:`pwd -P` # Command to run tests From 32a40d2b714ca91b97c1a71f8cab71836725d095 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 17 Aug 2017 16:10:59 +0100 Subject: [PATCH 51/87] Add ansible playbook for deployment via ansible --- yaml/ansible.yml | 55 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 yaml/ansible.yml diff --git a/yaml/ansible.yml b/yaml/ansible.yml new file mode 100644 index 0000000..5ae820b --- /dev/null +++ b/yaml/ansible.yml @@ -0,0 +1,55 @@ +--- +- name: APEL for Indigo + hosts: localhost + tasks: + # tasks file for ansible-dockerized-apel-db + - name: Launch APEL database + docker_container: + image: mysql:5.6 + name: apel_mysql + expose: 3306 + volumes: + - /var/lib/mysql:/var/lib/mysql + - /docker/etc/mysql/conf.d:/etc/mysql/conf.d + - /schemas:/docker-entrypoint-initdb.d + env_file: /yaml/mysql.env + + # wait for the database to have finished configuring to allow the + # Server loader to start properly + - pause: + prompt: 'Configuring Database, please wait.' 
+ minutes: 1 + + # tasks file for ansible-dockerized-apel-server + - name: Launch APEL Server container + docker_container: + image: indigodatacloud/apel:1.6.0-1 + name: apel_server + links: apel_mysql + volumes: + - /var/spool/apel/cloud:/var/spool/apel/cloud + - /docker/etc/mysql:/etc/mysql + - /docker/etc/apel:/etc/apel + - /docker/etc/cron.d/:/etc/cron.d + + # wait for the Server to have finished configuring before + # passing it messages + - pause: + prompt: 'Configuring Server, please wait.' + minutes: 1 + + # tasks file for ansible-dockerized-apel-rest-interface + - name: Launch APEL REST Interface Container + docker_container: + image: gregcorbett/rest:docker_build_fix + name: apel_rest_interface + links: apel_mysql + ports: + - "80:80" + - "443:443" + volumes: + - /var/spool/apel/cloud:/var/spool/apel/cloud + - /etc/httpd/ssl:/etc/httpd/ssl + - /docker/etc/mysql:/etc/mysql + - /docker/etc/apel:/etc/apel + env_file: /yaml/apel_rest_interface.env From 4ee09b59626514958f9b666865b268ee0158ffce Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 17 Aug 2017 16:19:59 +0100 Subject: [PATCH 52/87] Remove blank lines as they break ansible --- yaml/apel_rest_interface.env | 4 ---- yaml/mysql.env | 1 - 2 files changed, 5 deletions(-) diff --git a/yaml/apel_rest_interface.env b/yaml/apel_rest_interface.env index e2215f9..292e00f 100644 --- a/yaml/apel_rest_interface.env +++ b/yaml/apel_rest_interface.env @@ -1,21 +1,17 @@ DJANGO_SECRET_KEY=Put_a_secret_here - # Points to the JSON list of Resource Providers PROVIDERS_URL=http://indigo.cloud.plgrid.pl/cmdb/service/list - # The introspect URL for the IAM repsonsible for token based authN/authZ IAM_URLS=[\'iam-test.indigo-datacloud.eu\'] # The ID and secret of this service as registered with the sbove IAM SERVER_IAM_ID= SERVER_IAM_SECRET= - # Use these variables to revoke/grant POST rights. 
# Both are lists of HostDNs, these bash variables need # to be interpreted as a python list i.e [\'some_DN\'] # Remember these variables require a container restart to take effect. ALLOWED_TO_POST=[] BANNED_FROM_POST=[] - # Use this variables to grant GET rights. # It is a list of IAM tokens, this bash variable needs # to be interpreted as a python list i.e [\'some_id\'] diff --git a/yaml/mysql.env b/yaml/mysql.env index f449de1..fdf6ca3 100644 --- a/yaml/mysql.env +++ b/yaml/mysql.env @@ -2,4 +2,3 @@ MYSQL_ROOT_PASSWORD= MYSQL_DATABASE=apel_rest MYSQL_USER=apel MYSQL_PASSWORD= - From 9a62ab83b2566a2c675452dbcb4076dd601974c2 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Fri, 18 Aug 2017 09:44:55 +0100 Subject: [PATCH 53/87] Update README.md to include ansible deployment --- README.md | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 24857d8..ea49a29 100644 --- a/README.md +++ b/README.md @@ -20,12 +20,16 @@ It is currently expected that only the QoS/SLA tool will interact with these sum - Accept APEL-Cloud v0.2 and v0.4 usage records via POST requests to the REST endpoint `.../api/v1/cloud/record` - Provide access to summaries via GET requests to REST endpoint `.../api/v1/cloud/record/summary` -## Running the docker image on Centos 7 and Ubuntu 16.04 -We recommend using the docker image to run the Accounting server and REST interface. As such, having Docker and docker-compose installed is a prerequisite. +## Running the Docker image on Centos 7 and Ubuntu 16.04 +We recommend using the Docker image to run the Accounting server and REST interface. As such, having Docker installed is a prerequisite. See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/ubuntulinux/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/centos/) for details of how to install Docker. 
-See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install docker-compose +We also reccomend using Docker Compose or Anisble to deploy the containers. +* See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install Docker Compose. +* See [Install Ansible ](http://docs.ansible.com/ansible/latest/intro_installation.html#installation) for details of how to install Ansible. + +The instructions below are for both Docker Compose and Ansibe. 1. Download the source code for the version you wish to deploy, see [here](https://github.com/indigo-dc/Accounting/releases) for a list of releases and corresponding docker image tag. The source code contains schemas and yaml files needed for deploying via docker. @@ -79,11 +83,23 @@ See [Install Docker Compose](https://docs.docker.com/compose/install/) for detai chown -R root docker/etc/cron.d ``` -8. Launch the containers. It is recommeded to wait 1 minute in order for each container to configure fully before launching the next +8. Launch the containers. + +- With Docker Compose: + + It is recommeded to wait 1 minute in order for each container to configure fully before launching the next ``` docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_mysql + # Wait 1 minute docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_server + # Wait 1 minute docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_rest_interface ``` +- With Ansible: + + ``` + ansible-playbook yaml/ansible.yml + ``` + 9. 
Navigate a web browser to `https:///api/v1/cloud/record/summary` From 7ebb7d6fc33130eb6e961f018bd8e1c3af068db0 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Fri, 18 Aug 2017 11:05:44 +0100 Subject: [PATCH 54/87] Add note to modify ansible file before running --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ea49a29..19c17e4 100644 --- a/README.md +++ b/README.md @@ -98,6 +98,7 @@ The instructions below are for both Docker Compose and Ansibe. - With Ansible: + First, replace `` in `yaml/ansible.yaml` with the absoulte file path of inflated source code, then run the below command. ``` ansible-playbook yaml/ansible.yml ``` From 19799c41577fb7a786fd14782af5f07177041837 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 23 Aug 2017 16:13:36 +0100 Subject: [PATCH 55/87] Use most recent indigodatacloud tag to deploy --- yaml/ansible.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yaml/ansible.yml b/yaml/ansible.yml index 5ae820b..9d04b2a 100644 --- a/yaml/ansible.yml +++ b/yaml/ansible.yml @@ -41,7 +41,7 @@ # tasks file for ansible-dockerized-apel-rest-interface - name: Launch APEL REST Interface Container docker_container: - image: gregcorbett/rest:docker_build_fix + image: indigodatacloud/accounting:1.4.0-1 name: apel_rest_interface links: apel_mysql ports: From ed82623902b5d89849df4006ccd245a0ac890c56 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 10:19:51 +0100 Subject: [PATCH 56/87] Reword comment about running the ansible commands --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 19c17e4..6eb88d7 100644 --- a/README.md +++ b/README.md @@ -98,7 +98,7 @@ The instructions below are for both Docker Compose and Ansibe. - With Ansible: - First, replace `` in `yaml/ansible.yaml` with the absoulte file path of inflated source code, then run the below command. 
+ First, replace `` in `yaml/ansible.yaml` with the absolute file path of the inflated source code, then run the command below. ``` ansible-playbook yaml/ansible.yml ``` From 90d733f352491cf90c5de424744d27470bf752b4 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 10:21:38 +0100 Subject: [PATCH 57/87] Add note that source code also needed for ansible --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6eb88d7..106ddb7 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ We also reccomend using Docker Compose or Anisble to deploy the containers. The instructions below are for both Docker Compose and Ansibe. -1. Download the source code for the version you wish to deploy, see [here](https://github.com/indigo-dc/Accounting/releases) for a list of releases and corresponding docker image tag. The source code contains schemas and yaml files needed for deploying via docker. +1. Download the source code for the version you wish to deploy, see [here](https://github.com/indigo-dc/Accounting/releases) for a list of releases and corresponding docker image tag. The source code contains schemas and yaml files needed for deploying via docker-compose or ansible. 2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [here](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions. From 9e4282d7d2a3910e92790e45a45f53c31b29cdae Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 11:04:40 +0100 Subject: [PATCH 58/87] Remove wait after deploying Server - As the REST interface only requires the database's configuration be finished - can't remove the wait after deploying the database as it reports it's ready when the port is opened which isn't the same as ready to accept connections. 
--- README.md | 3 +-- yaml/ansible.yml | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/README.md b/README.md index 106ddb7..c40e751 100644 --- a/README.md +++ b/README.md @@ -87,12 +87,11 @@ The instructions below are for both Docker Compose and Ansibe. - With Docker Compose: - It is recommeded to wait 1 minute in order for each container to configure fully before launching the next + It is recommeded to wait 1 minute after launching the database in order for it to configure fully before launching the APEL containers ``` docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_mysql # Wait 1 minute docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_server - # Wait 1 minute docker-compose -f yaml/docker-compose.yaml up -d --force-recreate apel_rest_interface ``` diff --git a/yaml/ansible.yml b/yaml/ansible.yml index 9d04b2a..fcd3390 100644 --- a/yaml/ansible.yml +++ b/yaml/ansible.yml @@ -32,12 +32,6 @@ - /docker/etc/apel:/etc/apel - /docker/etc/cron.d/:/etc/cron.d - # wait for the Server to have finished configuring before - # passing it messages - - pause: - prompt: 'Configuring Server, please wait.' 
- minutes: 1 - # tasks file for ansible-dockerized-apel-rest-interface - name: Launch APEL REST Interface Container docker_container: From ce256a723125e0722f15780feb1e741d25e0622b Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 11:08:07 +0100 Subject: [PATCH 59/87] Change 'task files' to 'tasks' - as that seems to be the correct Ansible terminology --- yaml/ansible.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yaml/ansible.yml b/yaml/ansible.yml index fcd3390..535d6f6 100644 --- a/yaml/ansible.yml +++ b/yaml/ansible.yml @@ -2,7 +2,7 @@ - name: APEL for Indigo hosts: localhost tasks: - # tasks file for ansible-dockerized-apel-db + # tasks for ansible-dockerized-apel-db - name: Launch APEL database docker_container: image: mysql:5.6 @@ -20,7 +20,7 @@ prompt: 'Configuring Database, please wait.' minutes: 1 - # tasks file for ansible-dockerized-apel-server + # tasks for ansible-dockerized-apel-server - name: Launch APEL Server container docker_container: image: indigodatacloud/apel:1.6.0-1 @@ -32,7 +32,7 @@ - /docker/etc/apel:/etc/apel - /docker/etc/cron.d/:/etc/cron.d - # tasks file for ansible-dockerized-apel-rest-interface + # tasks for ansible-dockerized-apel-rest-interface - name: Launch APEL REST Interface Container docker_container: image: indigodatacloud/accounting:1.4.0-1 From 6e7b96caea13ffde321fc172190ad28ccd88fd20 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 11:09:32 +0100 Subject: [PATCH 60/87] Use generic term for the extracted source code --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c40e751..ab5956b 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,7 @@ The instructions below are for both Docker Compose and Ansibe. - With Ansible: - First, replace `` in `yaml/ansible.yaml` with the absolute file path of the inflated source code, then run the command below. 
+ First, replace `` in `yaml/ansible.yaml` with the absolute file path of the decompressed source code, then run the command below. ``` ansible-playbook yaml/ansible.yml ``` From 76bf217160a066160b00c461f6c0ef1d1142359a Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 13 Sep 2017 11:19:03 +0100 Subject: [PATCH 61/87] Change 'tasks' to 'task' - because each one (database, server, REST) is an individual task --- yaml/ansible.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yaml/ansible.yml b/yaml/ansible.yml index 535d6f6..01e6dec 100644 --- a/yaml/ansible.yml +++ b/yaml/ansible.yml @@ -2,7 +2,7 @@ - name: APEL for Indigo hosts: localhost tasks: - # tasks for ansible-dockerized-apel-db + # task for ansible-dockerized-apel-db - name: Launch APEL database docker_container: image: mysql:5.6 @@ -20,7 +20,7 @@ prompt: 'Configuring Database, please wait.' minutes: 1 - # tasks for ansible-dockerized-apel-server + # task for ansible-dockerized-apel-server - name: Launch APEL Server container docker_container: image: indigodatacloud/apel:1.6.0-1 @@ -32,7 +32,7 @@ - /docker/etc/apel:/etc/apel - /docker/etc/cron.d/:/etc/cron.d - # tasks for ansible-dockerized-apel-rest-interface + # task for ansible-dockerized-apel-rest-interface - name: Launch APEL REST Interface Container docker_container: image: indigodatacloud/accounting:1.4.0-1 From 9df688698d1e76c68fa3f45be08c97827acd3247 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 09:05:38 +0100 Subject: [PATCH 62/87] Update Ubuntu installation link to CE version - As the old link now re-directs to the EE version requiring a docker log in --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ab5956b..c010b2f 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ It is currently expected that only the QoS/SLA tool will interact with these sum ## Running the Docker image on Centos 7 and Ubuntu 16.04 We recommend using the Docker 
image to run the Accounting server and REST interface. As such, having Docker installed is a prerequisite. -See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/ubuntulinux/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/centos/) for details of how to install Docker. +See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/centos/) for details of how to install Docker. We also reccomend using Docker Compose or Anisble to deploy the containers. * See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install Docker Compose. From 6a23eab0c5b3744568b3bbce7ec1ade54e4e07a4 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 09:09:41 +0100 Subject: [PATCH 63/87] Update Centos installation link to CE version - to match Ubuntu instructions and most installations will be using the community edition of docker --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c010b2f..f953fab 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ It is currently expected that only the QoS/SLA tool will interact with these sum ## Running the Docker image on Centos 7 and Ubuntu 16.04 We recommend using the Docker image to run the Accounting server and REST interface. As such, having Docker installed is a prerequisite. -See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/centos/) for details of how to install Docker. +See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/centos/) for details of how to install Docker. 
We also reccomend using Docker Compose or Anisble to deploy the containers. * See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install Docker Compose. From f04ad53bbb05ccfc298ad5d5f9daf8862442adb8 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 11:38:53 +0100 Subject: [PATCH 64/87] Clarify that Post-installation steps aren't needed - nor assumed by the following instructions --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f953fab..19a66ff 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ It is currently expected that only the QoS/SLA tool will interact with these sum ## Running the Docker image on Centos 7 and Ubuntu 16.04 We recommend using the Docker image to run the Accounting server and REST interface. As such, having Docker installed is a prerequisite. -See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/centos/) for details of how to install Docker. +See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/centos/) for details of how to install Docker. There is no requirement to undertake the 'Post-installation steps for Linux'. We also reccomend using Docker Compose or Anisble to deploy the containers. * See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install Docker Compose. 
From 28650df51f9c71459f38e3b2004f1b9e7b2f5478 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 11:43:05 +0100 Subject: [PATCH 65/87] Clarify root ownership of docker/etc/cron.d - example command now includes sudo as that will likely be needed for the command to work --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 19a66ff..85c8abb 100644 --- a/README.md +++ b/README.md @@ -78,9 +78,9 @@ The instructions below are for both Docker Compose and Ansibe. 6. Before the REST interface will start, a certificate needs to be added to the container. This can be done by placing a certificate (`apache.crt`) and key (`apache.key`) under `/etc/httpd/ssl/`. This directory will be mounted into the container by docker-compose. -7. Make `docker/etc/cron.d` owned by `root`. This is required because this directory gets mounted into the container and it needs to be owned by root for cron jobs in the container to run. +7. If you are running Docker as a non-root user, you will need to make `docker/etc/cron.d` owned by `root`. This is because this directory gets mounted into the container and it needs to be owned by `root` for cron jobs in the container to run. ``` - chown -R root docker/etc/cron.d + sudo chown -R root docker/etc/cron.d ``` 8. Launch the containers. From 2119495372943070aae84cfb9edcf153fe4d5be2 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 14:23:03 +0100 Subject: [PATCH 66/87] Clarify the downloading of the source code --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 85c8abb..4e1a713 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ We also reccomend using Docker Compose or Anisble to deploy the containers. The instructions below are for both Docker Compose and Ansibe. -1. 
Download the source code for the version you wish to deploy, see [here](https://github.com/indigo-dc/Accounting/releases) for a list of releases and corresponding docker image tag. The source code contains schemas and yaml files needed for deploying via docker-compose or ansible. +1. Download the compressed source code from [here](https://github.com/indigo-dc/Accounting/releases/latest), it does not matter where the source code is downloaded to. As the source code contains schemas and yaml files needed for deploying via docker-compose or ansible, you will need to unzip the source code and then `cd` into the inflated directory. 2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [here](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions. From 79661a6ec4da95d8b9845ff094f9dd99a6ffedca Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 14:51:13 +0100 Subject: [PATCH 67/87] Update IAM instructions to reference new homepage - as the IAM interface has changed since the instructions were initially written --- doc/admin.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/admin.md b/doc/admin.md index d27918a..049df02 100644 --- a/doc/admin.md +++ b/doc/admin.md @@ -71,7 +71,7 @@ You should now have terminal access to the Accounting Server. ## Register the service as a protected resource with the Indigo Identity Access Management (IAM) -1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/login), click "Self Service Protected Resource Registration". +1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/dashboard#/home), click "MitreID Dashboard" and then click "Self Service Protected Resource Registration". 2. On the "Main" tab, give this resource an appropriate Client Name. 
From 16c61a20e2f0dfda5e3daab80657c45863bec00e Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:09:27 +0100 Subject: [PATCH 68/87] Add additional click statement in IAM registration --- doc/admin.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/admin.md b/doc/admin.md index 049df02..20e7a98 100644 --- a/doc/admin.md +++ b/doc/admin.md @@ -71,7 +71,7 @@ You should now have terminal access to the Accounting Server. ## Register the service as a protected resource with the Indigo Identity Access Management (IAM) -1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/dashboard#/home), click "MitreID Dashboard" and then click "Self Service Protected Resource Registration". +1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/dashboard#/home), click "MitreID Dashboard", then click "Self Service Protected Resource Registration" and then click "New Resource". 2. On the "Main" tab, give this resource an appropriate Client Name. From 266af73502d8f2b817a8b093edabdd2b66f745e6 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:16:27 +0100 Subject: [PATCH 69/87] Clarify the scope of the 'here' link - to try and prevent people reading on into the 'Authorize new PaaS' section --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4e1a713..6742b10 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ The instructions below are for both Docker Compose and Ansibe. 1. Download the compressed source code from [here](https://github.com/indigo-dc/Accounting/releases/latest), it does not matter where the source code is downloaded to. As the source code contains schemas and yaml files needed for deploying via docker-compose or ansible, you will need to unzip the source code and then `cd` into the inflated directory. -2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. 
See [here](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions. +2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [Register the service as a protected resource with the Indigo Identity Access Management (IAM)](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions. 3. Populate the following variables in `yaml/apel_rest_interface.env` ``` From 90df276357db1cb16df7f44e85581eafb21e1786 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:25:27 +0100 Subject: [PATCH 70/87] Add 'ALLOWED_TO_POST` to the overview --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6742b10..25dc681 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ APEL Cloud Accounting can account for the usage of OpenNebula and OpenStack inst The collectors produce "Usage Records" in the APEL-Cloud v0.2 or v0.4 message formats. Information about these format can be found [here](https://wiki.egi.eu/wiki/Federated_Cloud_Accounting#Documentation). -These records need to be sent as POST requests to the REST endpoint `.../api/v1/cloud/record`, where `...` is the machine hosting the docker image. A POST request requires an X.509 certificate to authenticate the request. The hostname, which should be the same as the common name (CN) contained in the X.509 certificate, must be listed as a provider [here](http://indigo.cloud.plgrid.pl/cmdb/service/list) for the request to be authorized. +These records need to be sent as POST requests to the REST endpoint `.../api/v1/cloud/record`, where `...` is the machine hosting the docker image. A POST request requires an X.509 certificate to authenticate the request. 
The hostname, which should be the same as the common name (CN) contained in the X.509 certificate, must be listed as a provider [here](http://indigo.cloud.plgrid.pl/cmdb/service/list) or be given special access via the `ALLOWED_TO_POST` enviroment variable for the request to be authorized. Accepted records are summarised twice daily. These summaries can be accessed with a GET request to `.../api/v1/cloud/record/summary`. Summaries can be filtered using `key=value` pairs. See [Supported key=value pairs](doc/user.md#supported-keyvalue-pairs) for a list of valid supported `key=value` pairs. A GET request requires an IAM access token be included in the request. This token is then sent to the IAM to authenticate the ID of the service requesting access to the summary. This ID needs to be in `ALLOWED_FOR_GET` in `yaml/apel_rest_interface.env` for access to be authorized. See [Authorize new WP5 components to view Summaries](doc/admin.md#authorize-new-wp5-components-to-view-summaries) for instructions on adding service to `ALLOWED_FOR_GET` From d2a4f84b2d0debd752c4601ac202a407eaa1a425 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:26:43 +0100 Subject: [PATCH 71/87] Remove dead link from overview - it was too deployment orientated anyway, focusing on the kubernetes deployment --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 25dc681..77f0d6e 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ The collectors produce "Usage Records" in the APEL-Cloud v0.2 or v0.4 message fo These records need to be sent as POST requests to the REST endpoint `.../api/v1/cloud/record`, where `...` is the machine hosting the docker image. A POST request requires an X.509 certificate to authenticate the request. 
The hostname, which should be the same as the common name (CN) contained in the X.509 certificate, must be listed as a provider [here](http://indigo.cloud.plgrid.pl/cmdb/service/list) or be given special access via the `ALLOWED_TO_POST` enviroment variable for the request to be authorized. -Accepted records are summarised twice daily. These summaries can be accessed with a GET request to `.../api/v1/cloud/record/summary`. Summaries can be filtered using `key=value` pairs. See [Supported key=value pairs](doc/user.md#supported-keyvalue-pairs) for a list of valid supported `key=value` pairs. A GET request requires an IAM access token be included in the request. This token is then sent to the IAM to authenticate the ID of the service requesting access to the summary. This ID needs to be in `ALLOWED_FOR_GET` in `yaml/apel_rest_interface.env` for access to be authorized. See [Authorize new WP5 components to view Summaries](doc/admin.md#authorize-new-wp5-components-to-view-summaries) for instructions on adding service to `ALLOWED_FOR_GET` +Accepted records are summarised twice daily. These summaries can be accessed with a GET request to `.../api/v1/cloud/record/summary`. Summaries can be filtered using `key=value` pairs. See [Supported key=value pairs](doc/user.md#supported-keyvalue-pairs) for a list of valid supported `key=value` pairs. A GET request requires an IAM access token be included in the request. This token is then sent to the IAM to authenticate the ID of the service requesting access to the summary. This ID needs to be in `ALLOWED_FOR_GET` in `yaml/apel_rest_interface.env` for access to be authorized. It is currently expected that only the QoS/SLA tool will interact with these summaries. 
From 269a267b1fd19d5984535688ff8b1a5baaef663d Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:28:41 +0100 Subject: [PATCH 72/87] Remove specific reference to a yaml file - as the precise file will change depending on deployment method --- doc/admin.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/admin.md b/doc/admin.md index 20e7a98..fb9ab6b 100644 --- a/doc/admin.md +++ b/doc/admin.md @@ -77,7 +77,7 @@ You should now have terminal access to the Accounting Server. 3. Click Save. -4. Store the ClientID, Client Secret, and Registration Access Token; as the ID and Secret will need to be put in `yaml/accounting-rest-interface-rc.yaml`, and the token will be needed to make further modifications to this registration. +4. Store the ClientID, Client Secret, and Registration Access Token; as the ID and Secret will need to be put into the appropriate yaml file later, and the token will be needed to make further modifications to this registration. ## Authorize new PaaS (Platform as a Service) Platform components to view Summaries From f95230c440baeb22b65cddaca0a20c35fc9d50f4 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 15:29:55 +0100 Subject: [PATCH 73/87] Further clarify the scope of this 'segway' --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 77f0d6e..e290f11 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ The instructions below are for both Docker Compose and Ansibe. 1. Download the compressed source code from [here](https://github.com/indigo-dc/Accounting/releases/latest), it does not matter where the source code is downloaded to. As the source code contains schemas and yaml files needed for deploying via docker-compose or ansible, you will need to unzip the source code and then `cd` into the inflated directory. -2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. 
See [Register the service as a protected resource with the Indigo Identity Access Management (IAM)](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions. +2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [Register the service as a protected resource with the Indigo Identity Access Management (IAM)](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions, before proceeding to step 3. 3. Populate the following variables in `yaml/apel_rest_interface.env` ``` From 1de565fb620a8099cd738254d5507e529e4410c3 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 16:16:21 +0100 Subject: [PATCH 74/87] Remove duplicate SERVER_IAM_* descriptions --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.md b/README.md index e290f11..7dd0103 100644 --- a/README.md +++ b/README.md @@ -57,10 +57,6 @@ The instructions below are for both Docker Compose and Ansibe. BANNED_FROM_POST: A (Python) list of X.509 HostDNs banned from submitting POST requests, even if they are listed by the PROVIDERS_URL. (e.g. ['/C=XX/O=XX/OU=XX/L=XX/CN=banned_host.test']) - - SERVER_IAM_ID: An IAM ID corresponding to this instance, used to validate tokens. - - SERVER_IAM_SECRET: An IAM secret corresponding to this instance, used to validate tokens. ``` 4. Populate the following variables in `yaml/mysql.env` From 6729491ec81d9b7954a054f7cc70ce94e3b33b3f Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 17 Aug 2017 16:32:17 +0100 Subject: [PATCH 75/87] Explain what the secret key is and how it's used --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7dd0103..afd459e 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,10 @@ The instructions below are for both Docker Compose and Ansibe. 3. 
Populate the following variables in `yaml/apel_rest_interface.env` ``` - DJANGO_SECRET_KEY: The Django server requires its own "secret". + DJANGO_SECRET_KEY: The Django server requires its own "secret". A typical secret key + will be a 50 character long random sequence of alphanumeric + and special characters. This secret will be used for JSON object + signing and other cryptographic functions like creating CSRF keys. PROVIDERS_URL: Points to the JSON list of Resource Providers From 9608d3c68a192198d984002401acb086a3b0d207 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Fri, 18 Aug 2017 09:20:47 +0100 Subject: [PATCH 76/87] Order variables to match apel_rest_interface.env - otherwise someone may add values in the wrong order --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index afd459e..07f289b 100644 --- a/README.md +++ b/README.md @@ -49,10 +49,7 @@ The instructions below are for both Docker Compose and Ansibe. SERVER_IAM_ID: An IAM ID corresponding to this instance, used to validate tokens. SERVER_IAM_SECRET: An IAM secret corresponding to this instance, used to validate tokens. - - ALLOWED_FOR_GET: A (Python) list of IAM service IDs allowed to submit GET requests. - (e.g. ['ac2f23e0-8103-4581-8014-e0e82c486e36']) - + ALLOWED_TO_POST: A (Python) list of X.509 HostDNs allowed to submit POST requests, in addition to those listed by the PROVIDERS_URL. (e.g. ['/C=XX/O=XX/OU=XX/L=XX/CN=special_host.test']) @@ -60,6 +57,9 @@ The instructions below are for both Docker Compose and Ansibe. BANNED_FROM_POST: A (Python) list of X.509 HostDNs banned from submitting POST requests, even if they are listed by the PROVIDERS_URL. (e.g. ['/C=XX/O=XX/OU=XX/L=XX/CN=banned_host.test']) + + ALLOWED_FOR_GET: A (Python) list of IAM service IDs allowed to submit GET requests. + (e.g. ['ac2f23e0-8103-4581-8014-e0e82c486e36']) ``` 4. 
Populate the following variables in `yaml/mysql.env` From f442963f1e2f619329b271ffd40eebd3f1f33857 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Tue, 22 Aug 2017 12:49:47 +0100 Subject: [PATCH 77/87] Add note saying cert dir may need to be created --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 07f289b..f35ad9f 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ The instructions below are for both Docker Compose and Ansibe. 5. `MYSQL_PASSWORD` will also need to be added to the password field `docker/etc/apel/clouddb.cfg` and `docker/etc/mysql/my.cnf` -6. Before the REST interface will start, a certificate needs to be added to the container. This can be done by placing a certificate (`apache.crt`) and key (`apache.key`) under `/etc/httpd/ssl/`. This directory will be mounted into the container by docker-compose. +6. Before the REST interface will start, a certificate needs to be added to the container. This can be done by placing a certificate (`apache.crt`) and key (`apache.key`) under `/etc/httpd/ssl/` on the host machine. This directory will be mounted into the container by docker-compose. Note: You may need to create the `/etc/httpd/ssl/` directory. 7. If you are running Docker as a non-root user, you will need to make `docker/etc/cron.d` owned by `root`. This is because this directory gets mounted into the container and it needs to be owned by `root` for cron jobs in the container to run. ``` From 550f29bc0600d54e1fa1d87b4757ae12d681d1f3 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Tue, 22 Aug 2017 14:16:07 +0100 Subject: [PATCH 78/87] Add note about crt/key file permissions --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f35ad9f..d6c65c0 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ The instructions below are for both Docker Compose and Ansibe. 5. 
`MYSQL_PASSWORD` will also need to be added to the password field `docker/etc/apel/clouddb.cfg` and `docker/etc/mysql/my.cnf` -6. Before the REST interface will start, a certificate needs to be added to the container. This can be done by placing a certificate (`apache.crt`) and key (`apache.key`) under `/etc/httpd/ssl/` on the host machine. This directory will be mounted into the container by docker-compose. Note: You may need to create the `/etc/httpd/ssl/` directory. +6. Before the REST interface will start, a certificate needs to be added to the container. This can be done by placing a certificate (`apache.crt`) and key (`apache.key`) under `/etc/httpd/ssl/` on the host machine (be careful to preserve correct `.crt`/`.key` file permissions). `/etc/httpd/ssl/` will be mounted into the container by docker-compose. Note: You may need to create the `/etc/httpd/ssl/` directory. 7. If you are running Docker as a non-root user, you will need to make `docker/etc/cron.d` owned by `root`. This is because this directory gets mounted into the container and it needs to be owned by `root` for cron jobs in the container to run. ``` From b81a5f4e40253b5a2f140c7d67844751686b64c0 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 26 Sep 2017 13:22:38 +0100 Subject: [PATCH 79/87] Add note refering user to the CDMI documentation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d6c65c0..3e6c89b 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ APEL Cloud Accounting can account for the usage of OpenNebula and OpenStack inst The collectors produce "Usage Records" in the APEL-Cloud v0.2 or v0.4 message formats. Information about these format can be found [here](https://wiki.egi.eu/wiki/Federated_Cloud_Accounting#Documentation). -These records need to be sent as POST requests to the REST endpoint `.../api/v1/cloud/record`, where `...` is the machine hosting the docker image. 
A POST request requires an X.509 certificate to authenticate the request. The hostname, which should be the same as the common name (CN) contained in the X.509 certificate, must be listed as a provider [here](http://indigo.cloud.plgrid.pl/cmdb/service/list) or be given special access via the `ALLOWED_TO_POST` enviroment variable for the request to be authorized. +These records need to be sent as POST requests to the REST endpoint `.../api/v1/cloud/record`, where `...` is the machine hosting the docker image. A POST request requires an X.509 certificate to authenticate the request. The hostname, which should be the same as the common name (CN) contained in the X.509 certificate, must be listed as a provider [here](http://indigo.cloud.plgrid.pl/cmdb/service/list) or be given special access via the `ALLOWED_TO_POST` enviroment variable for the request to be authorized. For details of how to be added to the CMDB service list, please refer to the [CMDB documentation](https://github.com/indigo-dc/cmdb/blob/master/README.md). Accepted records are summarised twice daily. These summaries can be accessed with a GET request to `.../api/v1/cloud/record/summary`. Summaries can be filtered using `key=value` pairs. See [Supported key=value pairs](doc/user.md#supported-keyvalue-pairs) for a list of valid supported `key=value` pairs. A GET request requires an IAM access token be included in the request. This token is then sent to the IAM to authenticate the ID of the service requesting access to the summary. This ID needs to be in `ALLOWED_FOR_GET` in `yaml/apel_rest_interface.env` for access to be authorized. 
From 33854e60a5b62f6db405e799968f61162f446465 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 26 Sep 2017 13:33:36 +0100 Subject: [PATCH 80/87] Remove wordy and repetitive hyperlink --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3e6c89b..f622b27 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ The instructions below are for both Docker Compose and Ansibe. 1. Download the compressed source code from [here](https://github.com/indigo-dc/Accounting/releases/latest), it does not matter where the source code is downloaded to. As the source code contains schemas and yaml files needed for deploying via docker-compose or ansible, you will need to unzip the source code and then `cd` into the inflated directory. -2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [Register the service as a protected resource with the Indigo Identity Access Management (IAM)](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions, before proceeding to step 3. +2. Register the service as a protected resource with the Indigo Identity Access Management (IAM) service. See [this section](doc/admin.md#register-the-service-as-a-protected-resource-with-the-indigo-identity-access-management-iam) for instructions, before proceeding to step 3. 3. Populate the following variables in `yaml/apel_rest_interface.env` ``` From 5877c0d81e19bce9572a5abded602f7d922ffcf5 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Tue, 26 Sep 2017 14:55:53 +0100 Subject: [PATCH 81/87] Rework single step into bullet-pointed sub-list --- doc/admin.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/admin.md b/doc/admin.md index fb9ab6b..7ee0887 100644 --- a/doc/admin.md +++ b/doc/admin.md @@ -71,7 +71,10 @@ You should now have terminal access to the Accounting Server. 
## Register the service as a protected resource with the Indigo Identity Access Management (IAM) -1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/dashboard#/home), click "MitreID Dashboard", then click "Self Service Protected Resource Registration" and then click "New Resource". +1. On the [IAM homepage](https://iam-test.indigo-datacloud.eu/dashboard#/home): + * click "MitreID Dashboard" + * click "Self Service Protected Resource Registration" + * click "New Resource". 2. On the "Main" tab, give this resource an appropriate Client Name. From 9cd1942057170d92bd2d711dd102b5e348260091 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Tue, 26 Sep 2017 15:03:08 +0100 Subject: [PATCH 82/87] Reword hyperlink so it's clearer it's external --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f622b27..09e5523 100644 --- a/README.md +++ b/README.md @@ -25,9 +25,9 @@ We recommend using the Docker image to run the Accounting server and REST interf See [Ubuntu 16.04 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/) or [Centos 7 Instructions](https://docs.docker.com/engine/installation/linux/docker-ce/centos/) for details of how to install Docker. There is no requirement to undertake the 'Post-installation steps for Linux'. -We also reccomend using Docker Compose or Anisble to deploy the containers. -* See [Install Docker Compose](https://docs.docker.com/compose/install/) for details of how to install Docker Compose. -* See [Install Ansible ](http://docs.ansible.com/ansible/latest/intro_installation.html#installation) for details of how to install Ansible. +We also recommend using Docker Compose or Anisble to deploy the containers. +* See [the offical Docker Compose documentation](https://docs.docker.com/compose/install/) for details of how to install docker-compose. 
+* See [the official Ansible documentation](http://docs.ansible.com/ansible/latest/intro_installation.html#installation) for details of how to install Ansible.
-# NEEDS POPULATING if and only if deploying database containers -MYSQL_ROOT_PASSWORD= - -################################################# - - -# This script corresponds to the following version -VERSION="1.3.0-1" - -function usage { - echo "Version: $VERSION" - echo "USAGE:" - echo "./run_container.sh [ -p (y)|n ] [ -d (y)|n ] IMAGE_NAME" - echo "" - echo " -p y: Pull the image from a registry." - echo " n: Omit the docker pull command," - echo " used to run an instance of a local image." - echo " -d y: Deploy the database container" - echo " n: Do not deploy the database container," - echo " used when deploying an upgrade to the APEL Server container." - echo " -h: Displays the message." - exit -} - -# Set defaults for options -PULL="y" -DATABASE="y" - -# Process options -while getopts "d:hp:" OPT; do - case $OPT in - h) # Display the usage message - usage - ;; - p) # Pull the docker image or use locally - if [ "$OPTARG" == "y" ] || [ "$OPTARG" == "n" ] - then - PULL=$OPTARG - else - echo "Invalid -p arguement: $OPTARG" - usage - fi - ;; - d) # Deploy the database - if [ "$OPTARG" == "y" ] || [ "$OPTARG" == "n" ] - then - DATABASE=$OPTARG - else - echo "Invalid -d arguement: $OPTARG" - usage - fi - ;; - ?) - usage - ;; - esac -done - -shift "$((OPTIND-1))" # Shift off the options and optional. 
- -# Set arguements -if [ $# -ne 1 ] -then - usage -fi - -IMAGE_NAME=$1 -echo "This script will deploy the containers for INDIGO-DataCloud Accounting" -echo "Deploying $IMAGE_NAME with run_container.sh ($VERSION)" - -# If -p y, or if -p was ommited -if [ "$PULL" == "y" ] -then - echo "Pulling image" - docker pull $IMAGE_NAME -else - echo "Using local image" -fi - -if [ "$DATABASE" == "y" ] -then - # Deploy the databbase - echo "Deploying MySQL Container" - - docker run -v /var/lib/mysql:/var/lib/mysql --name apel-mysql -p 3306:3306 -v `pwd`/docker/etc/mysql/conf.d:/etc/mysql/conf.d -e "MYSQL_ROOT_PASSWORD=$MYSQL_ROOT_PASSWORD" -e "MYSQL_DATABASE=apel_rest" -e "MYSQL_USER=apel" -e "MYSQL_PASSWORD=$MYSQL_PASSWORD" -d mysql:5.6 - - #wait for apel-mysql to configure - sleep 60 - - # Create schema - docker exec apel-mysql mysql -u root -p$MYSQL_ROOT_PASSWORD apel_rest -e "`cat schemas/cloud.sql`" - - # Partition tables - docker exec apel-mysql mysql -u root -p$MYSQL_ROOT_PASSWORD apel_rest -e "`cat schemas/cloud-extra.sql`" - echo "MySQL Container deployed" -fi - -echo "Deploying APEL Server Container" - -docker run -d --link apel-mysql:mysql -p 80:80 -p 443:443 -v /var/spool/apel/cloud:/var/spool/apel/cloud -e "MYSQL_PASSWORD=$MYSQL_PASSWORD" -e "ALLOWED_FOR_GET=$ALLOWED_FOR_GET" -e "SERVER_IAM_ID=$SERVER_IAM_ID" -e "SERVER_IAM_SECRET=$SERVER_IAM_SECRET" -e "DJANGO_SECRET_KEY=$DJANGO_SECRET_KEY" $IMAGE_NAME - - -# this allows the APEL REST interface to configure -sleep 60 -echo "APEL Server Container deployed" -echo "Deployment complete!" 
From 109575e1095a624ee0a75aa36683425639ed5494 Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Wed, 27 Sep 2017 16:46:17 +0100 Subject: [PATCH 84/87] Version bump to 1.5.0 - package number has been ommitted from README.md as the features are independent of the package number --- README.md | 2 +- apel_rest/__init__.py | 2 +- api/__init__.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 09e5523..3ed750d 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ Accepted records are summarised twice daily. These summaries can be accessed wit It is currently expected that only the QoS/SLA tool will interact with these summaries. -### Features of Version 1.4.0-1 +### Features of Version 1.5.0 - Accept APEL-Cloud v0.2 and v0.4 usage records via POST requests to the REST endpoint `.../api/v1/cloud/record` - Provide access to summaries via GET requests to REST endpoint `.../api/v1/cloud/record/summary` diff --git a/apel_rest/__init__.py b/apel_rest/__init__.py index 1f7f4a6..cc4b6a7 100644 --- a/apel_rest/__init__.py +++ b/apel_rest/__init__.py @@ -15,4 +15,4 @@ @author Greg Corbett """ -__version__ = '1, 4, 0' +__version__ = '1, 5, 0' diff --git a/api/__init__.py b/api/__init__.py index 1f7f4a6..cc4b6a7 100644 --- a/api/__init__.py +++ b/api/__init__.py @@ -15,4 +15,4 @@ @author Greg Corbett """ -__version__ = '1, 4, 0' +__version__ = '1, 5, 0' From 717a28cf55d8379b766ba3cf3a9807a0527e3044 Mon Sep 17 00:00:00 2001 From: gregcorbett Date: Thu, 28 Sep 2017 09:39:14 +0100 Subject: [PATCH 85/87] Add upgrade to 1.5.0 instructions --- doc/admin.md | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/doc/admin.md b/doc/admin.md index 7ee0887..0e3991f 100644 --- a/doc/admin.md +++ b/doc/admin.md @@ -88,6 +88,37 @@ You should now have terminal access to the Accounting Server. 
`"['XXXXXXXXXXXX','XXXXXXXXXXXXXXXX']".` +## How to update an already deployed service to 1.5.0 (from 1.4.0) +These instructions assume the containers were previously deployed with docker-compose and they use docker-compose to upgrade to the new version + +1. Stop the APEL REST Interface container +``` +docker-compose -f yaml/docker-compose.yaml stop apel_rest_interface +``` + +2. In `yaml/apel_rest_interface.env`, change +``` +IAM_URL=https://example-iam.example.url.eu/introspect +``` +to +``` +IAM_URLS=[\'example-iam.example.url.eu\'] +``` + +3. In `yaml/docker-compose.yaml`, change +``` +indigodatacloud/accounting:1.4.0-1 +``` +to +``` +indigodatacloud/accounting:1.5.0-1 +``` + +4. Now, start the APEL Rest Interface Container +``` +docker-compose -f yaml/docker-compose.yaml up -d apel_rest_interface +``` + ## How to update an already deployed service to 1.4.0 (from 1.3.2) This section assumes previous deployment via the `docker/run_container.sh` script. From a084ba06ebc13337ca04f3f49aa2c91572a0932a Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 28 Sep 2017 09:54:23 +0100 Subject: [PATCH 86/87] Update kubernetes to support multiple IAMs --- yaml/accounting-rest-interface-rc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yaml/accounting-rest-interface-rc.yaml b/yaml/accounting-rest-interface-rc.yaml index 3f30f62..4be210a 100644 --- a/yaml/accounting-rest-interface-rc.yaml +++ b/yaml/accounting-rest-interface-rc.yaml @@ -35,8 +35,8 @@ spec: value: - name: PROVIDERS_URL value: "http://indigo.cloud.plgrid.pl/cmdb/service/list" - - name: IAM_URL - value: "https://iam-test.indigo-datacloud.eu/introspect" + - name: IAM_URLS + value: "[iam-test.indigo-datacloud.eu]" - name: ALLOWED_TO_POST value: "[]" - name: BANNED_FROM_POST From eb29573eee5280c86ed3bb54af75277fa46c9e5c Mon Sep 17 00:00:00 2001 From: Greg Corbett Date: Thu, 28 Sep 2017 09:54:39 +0100 Subject: [PATCH 87/87] Use future indigo tag in yaml files --- 
yaml/accounting-rest-interface-rc.yaml | 2 +- yaml/ansible.yml | 2 +- yaml/docker-compose.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/yaml/accounting-rest-interface-rc.yaml b/yaml/accounting-rest-interface-rc.yaml index 4be210a..d8227d9 100644 --- a/yaml/accounting-rest-interface-rc.yaml +++ b/yaml/accounting-rest-interface-rc.yaml @@ -13,7 +13,7 @@ spec: spec: containers: - name: accounting - image: indigodatacloud/accounting:1.4.0-1 + image: indigodatacloud/accounting:1.5.0-1 ports: - containerPort: 80 hostPort: 80 diff --git a/yaml/ansible.yml b/yaml/ansible.yml index 01e6dec..286bdde 100644 --- a/yaml/ansible.yml +++ b/yaml/ansible.yml @@ -35,7 +35,7 @@ # task for ansible-dockerized-apel-rest-interface - name: Launch APEL REST Interface Container docker_container: - image: indigodatacloud/accounting:1.4.0-1 + image: indigodatacloud/accounting:1.5.0-1 name: apel_rest_interface links: apel_mysql ports: diff --git a/yaml/docker-compose.yaml b/yaml/docker-compose.yaml index 69b5727..44409a2 100644 --- a/yaml/docker-compose.yaml +++ b/yaml/docker-compose.yaml @@ -3,7 +3,7 @@ services: apel_rest_interface: links: - apel_mysql - image: indigodatacloud/accounting:1.4.0-1 + image: indigodatacloud/accounting:1.5.0-1 ports: - "80:80" - "443:443"