Merge "Implement Akamai caching rules"

This commit is contained in:
Jenkins 2014-12-04 21:55:33 +00:00 committed by Gerrit Code Review
commit fbc43276c2
6 changed files with 120 additions and 6 deletions

View File

@ -69,6 +69,12 @@ class ServiceController(base.ServiceBase):
self._process_referrer_restriction(referrer_whitelist_value,
rule)
# implementing caching-rules for akamai
# we do not have to use copy here, since caching is only used once
caching_rules = service_obj.caching
# Traverse the existing rules list to add the necessary caching rules
self._process_caching_rules(caching_rules, post_data['rules'])
classified_domains = self._classify_domains(service_obj.domains)
try:
@ -182,6 +188,66 @@ class ServiceController(base.ServiceBase):
'value': referrer_whitelist_value
})
def _process_caching_rules(self, caching_rules, rules_list):
for caching_rule in caching_rules:
if caching_rule.name.lower() == 'default':
for rule in rules_list:
# this branch could not be hit when there is no
# 'default' origin rule
matches_dict = rule['matches'][0]
if (matches_dict['name'] == 'url-wildcard' or
matches_dict['name'] == 'url-path') and (
matches_dict['value'] == '/*'):
rule['behaviors'].append({
'name': 'caching',
'type': 'fixed',
# assuming the input number to caching rule
# ttl is in second
'value': '%ss' % caching_rule.ttl
})
caching_rules.remove(caching_rule)
else:
for rule in rules_list:
matches_dict = rule['matches'][0]
if matches_dict['name'] == 'url-wildcard':
for r in caching_rule.rules:
if r.request_url == matches_dict['value']:
rule['behaviors'].append({
'name': 'caching',
'type': 'fixed',
# assuming the input number to caching rule
# ttl is in second
'value': '%ss' % caching_rule.ttl
})
caching_rule.rules.remove(r)
if caching_rule.rules == []:
# in this case all the rule for this caching
# rule has been processed
caching_rules.remove(caching_rule)
# at this point, all the unprocessed rules are still left in caching
# rules list, wee need to add separate rule for that
for caching_rule in caching_rules:
rule_dict_template = {
'matches': [],
'behaviors': []
}
for rule in caching_rule.rules:
match_rule = {
'name': 'url-wildcard',
'value': rule.request_url
}
rule_dict_template['matches'].append(match_rule)
rule_dict_template['behaviors'].append({
'name': 'caching',
'type': 'fixed',
# assuming the input number to caching rule
# ttl is in second
'value': '%ss' % caching_rule.ttl
})
rules_list.append(rule_dict_template)
caching_rules.remove(caching_rule)
def get(self, service_name):
    """Fetch a provider service by name — not implemented for Akamai."""
    pass
@ -243,6 +309,12 @@ class ServiceController(base.ServiceBase):
self._process_referrer_restriction(
referrer_whitelist_value, rule)
# implementing caching-rules for akamai
# we do not have to use copy here, since caching is only used once
caching_rules = service_obj.caching
# Traverse the existing rules list to add the necessary caching rules
self._process_caching_rules(caching_rules, policy_content['rules'])
# Update domain if necessary ( by adjust digital property)
classified_domains = self._classify_domains(service_obj.domains)
@ -335,6 +407,12 @@ class ServiceController(base.ServiceBase):
self._process_referrer_restriction(
referrer_whitelist_value, rule)
# implementing caching-rules for akamai
caching_rules = service_obj.caching
# Traverse the existing rules list to add the necessary caching rules
self._process_caching_rules(caching_rules,
policy_content['rules'])
# post new policies back with Akamai Policy API
try:
LOG.info('Start to update policy %s ' % policy)

View File

@ -15,6 +15,7 @@
import json
from poppy.model.helpers import cachingrule
from poppy.model.helpers import domain
from poppy.model.helpers import origin
from poppy.model.helpers import provider_details
@ -347,6 +348,8 @@ class ServicesController(base.ServicesController):
domains = [json.loads(d) for d in result.get('domains', []) or []]
restrictions = [json.loads(r)
for r in result.get('restrictions', []) or []]
caching_rules = [json.loads(c) for c in result.get('caching_rules', [])
or []]
# create models for each item
origins = [
@ -370,8 +373,18 @@ class ServicesController(base.ServicesController):
for r_rule in r['rules']])
for r in restrictions]
caching_rules = [cachingrule.CachingRule(
caching_rule.get('name'),
caching_rule.get('ttl'),
[rule.Rule(rule_i.get('name'),
referrer=rule_i.get('request_url'))
for rule_i in caching_rule['rules']])
for caching_rule in caching_rules]
# create the service object
s = service.Service(name, domains, origins, flavor_ref, restrictions)
s = service.Service(name, domains, origins, flavor_ref,
caching=caching_rules,
restrictions=restrictions)
# format the provider details
provider_detail_results = result.get('provider_details') or {}

View File

@ -29,4 +29,5 @@ class Model(collections.OrderedDict):
super(Model, self).__init__()
self['name'] = caching.name
self['ttl'] = caching.ttl
self['rules'] = [rule.Model(r) for r in caching.rules]
if caching.rules != []:
self['rules'] = [rule.Model(r) for r in caching.rules]

View File

@ -19,6 +19,7 @@ except ImportError: # pragma: no cover
import collections # pragma: no cover
from poppy.common import uri
from poppy.transport.pecan.models.response import cachingrules
from poppy.transport.pecan.models.response import domain
from poppy.transport.pecan.models.response import link
from poppy.transport.pecan.models.response import origin
@ -36,6 +37,8 @@ class Model(collections.OrderedDict):
self["origins"] = [origin.Model(o) for o in service_obj.origins]
self["restrictions"] = [restriction.Model(r) for r in
service_obj.restrictions]
self["caching"] = [cachingrules.Model(c) for c in
service_obj.caching]
self["status"] = service_obj.status
self["flavor_ref"] = uri.encode(u'{0}/v1.0/flavors/{1}'.format(
request.host_url,

View File

@ -41,7 +41,7 @@
],
"flavor_ref" : "standard"
},
"multiple_origins_complicated_with_referrer_restriction": {
"multiple_origins_complicated_with_referrer_restriction_and_caching": {
"name" : "mysite.com",
"domains": [
{"domain": "parsely.sage.com"},
@ -56,7 +56,7 @@
"rules": [{"name": "img", "request_url": "/img"}] }
],
"restrictions": [
{
{
"name": "website only",
"rules": [
{
@ -75,6 +75,20 @@
]
}
],
"caching": [
{"name": "default", "ttl": 1200 },
{"name": "img-only",
"ttl": 1800,
"rules": [
{ "name": "jpeg-rules",
"request_url": "/*.jpeg"
},
{ "name": "gif-rules",
"request_url": "/*.gif"
}
]
}
],
"flavor_ref" : "standard"
}
}

View File

@ -77,7 +77,7 @@ class TestServices(base.TestCase):
service_obj = service.load_from_json(service_json)
# test exception
self.controller.policy_api_client.delete.return_value = mock.Mock(
self.controller.policy_api_client.put.return_value = mock.Mock(
status_code=400,
text='Some create service error happened'
)
@ -88,9 +88,14 @@ class TestServices(base.TestCase):
@ddt.file_data('data_service.json')
def test_create(self, service_json):
service_obj = service.load_from_json(service_json)
self.controller.policy_api_client.put.return_value = mock.Mock(
status_code=200,
text='Put successful'
)
self.controller.create(service_obj)
self.controller.policy_api_client.put.assert_called_once()
# make sure all the caching rules are processed
self.assertTrue(service_obj.caching == [])
def test_delete_with_exception(self):
provider_service_id = json.dumps([str(uuid.uuid1())])