Fix compression, date fields and add some tests

- Fix a compression issue caused by a missing json.dumps before zlib.compress (see the sketch below)
- Use "auto_now_add=True" instead of "default=django.utils.timezone.now"
- Add more tests

Change-Id: I7447750b843d1622db5489e1045592945be1d090
Guillaume Vincent 2018-03-19 12:40:46 +01:00 committed by David Moreau Simard
parent 167fc26311
commit 69cc8dcac1
7 changed files with 164 additions and 216 deletions
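
For reference, a minimal standalone sketch of the round-trip the compressed serializer fields now perform (plain json and zlib, no Django required; variable names are illustrative):

    import json
    import zlib

    # Objects are serialized to JSON first, then compressed: zlib only
    # accepts bytes, so compressing a raw dict would raise a TypeError,
    # and compressing a non-JSON string would break json.loads() on read.
    parameters = {'foo': 'bar'}
    stored = zlib.compress(json.dumps(parameters).encode('utf8'))

    # Reading back reverses the steps: decompress, decode, json.loads.
    restored = json.loads(zlib.decompress(stored).decode('utf8'))
    assert restored == parameters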

View File

@@ -1,8 +1,7 @@
# Generated by Django 2.0.3 on 2018-03-17 19:47
# Generated by Django 2.0.3 on 2018-03-19 11:38
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
@@ -63,7 +62,7 @@ class Migration(migrations.Migration):
('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('started', models.DateTimeField(default=django.utils.timezone.now)),
('started', models.DateTimeField(auto_now_add=True)),
('ended', models.DateTimeField(blank=True, null=True)),
('name', models.TextField(blank=True, null=True)),
],
@@ -77,7 +76,7 @@ class Migration(migrations.Migration):
('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('started', models.DateTimeField(default=django.utils.timezone.now)),
('started', models.DateTimeField(auto_now_add=True)),
('ended', models.DateTimeField(blank=True, null=True)),
('path', models.CharField(max_length=255)),
('ansible_version', models.CharField(max_length=255)),
@@ -109,7 +108,7 @@ class Migration(migrations.Migration):
('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('started', models.DateTimeField(default=django.utils.timezone.now)),
('started', models.DateTimeField(auto_now_add=True)),
('ended', models.DateTimeField(blank=True, null=True)),
('status', models.CharField(choices=[('ok', 'ok'), ('failed', 'failed'), ('skipped', 'skipped'), ('unreachable', 'unreachable'), ('unknown', 'unknown')], default='unknown', max_length=25)),
('changed', models.BooleanField(default=False)),
@@ -132,7 +131,7 @@ class Migration(migrations.Migration):
('id', models.BigAutoField(editable=False, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('started', models.DateTimeField(default=django.utils.timezone.now)),
('started', models.DateTimeField(auto_now_add=True)),
('ended', models.DateTimeField(blank=True, null=True)),
('name', models.TextField(blank=True, null=True)),
('action', models.TextField()),
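
On the date-field change: with auto_now_add=True Django populates the column automatically when the row is inserted, so no callable default is needed. A minimal sketch of the resulting field definitions, mirroring the migration above (the mixin name follows the models file shown next; treat this as illustrative rather than the exact model code):

    from django.db import models

    class DurationMixin(models.Model):
        class Meta:
            abstract = True

        # Set once at insertion time, replacing default=timezone.now
        started = models.DateTimeField(auto_now_add=True)
        # Stays empty until the item actually finishes
        ended = models.DateTimeField(blank=True, null=True)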

View File

@@ -68,10 +68,10 @@ class Base(models.Model):
"""
class Meta:
abstract = True
# note (GV): a better choice would be id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
id = models.BigAutoField(primary_key=True, editable=False)
created = models.DateTimeField(auto_now_add=True, editable=False)
updated = models.DateTimeField(auto_now=True, editable=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class DurationMixin(models.Model):

View File

@@ -33,6 +33,7 @@ class CompressedTextField(serializers.CharField):
Compresses text before storing it in the database.
Decompresses text from the database before serving it.
"""
def to_representation(self, obj):
return zlib.decompress(obj).decode('utf8')
@@ -46,11 +47,12 @@ class CompressedObjectField(serializers.JSONField):
database.
Decompresses/deserializes an object before serving it.
"""
def to_representation(self, obj):
return json.loads(zlib.decompress(obj).decode('utf8'))
def to_internal_value(self, data):
return zlib.compress(data.encode('utf8'))
return zlib.compress(json.dumps(data).encode('utf8'))
class ItemDurationField(serializers.DurationField):
@@ -58,6 +60,7 @@ class ItemDurationField(serializers.DurationField):
Calculates duration between started and ended or between started and
updated if we do not yet have an end.
"""
def __init__(self, **kwargs):
kwargs['read_only'] = True
super(ItemDurationField, self).__init__(**kwargs)
@@ -75,6 +78,7 @@ class BaseSerializer(serializers.ModelSerializer):
"""
Serializer for the data in the model base
"""
class Meta:
abstract = True
@@ -93,190 +97,59 @@ class DurationSerializer(serializers.ModelSerializer):
"""
Serializer for duration-based fields
"""
class Meta:
abstract = True
started = serializers.DateTimeField(
initial=timezone.now().isoformat(),
help_text='Date this item started %s' % DATE_FORMAT
)
ended = serializers.DateTimeField(
required=False,
help_text='Date this item ended %s' % DATE_FORMAT
)
duration = ItemDurationField(source='*')
duration = serializers.SerializerMethodField()
def validate(self, data):
"""
Check that the start is before the end.
"""
if 'ended' in data and (data['started'] > data['ended']):
raise serializers.ValidationError(
"'Ended' must be before 'started'"
)
return data
@staticmethod
def get_duration(obj):
if obj.ended is None:
return timezone.now() - obj.started
return obj.ended - obj.started
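
The duration field now relies on DRF's SerializerMethodField: a read-only field whose value comes from a get_<field_name> method on the serializer, which receives the whole instance. A small, self-contained sketch of the convention (the serializer and the attributes on obj are hypothetical):

    from django.utils import timezone
    from rest_framework import serializers

    class ExampleDurationSerializer(serializers.Serializer):
        # Read-only by definition; DRF calls get_duration(obj) for the value
        duration = serializers.SerializerMethodField()

        @staticmethod
        def get_duration(obj):
            # While the item is still running, measure up to "now"
            if obj.ended is None:
                return timezone.now() - obj.started
            return obj.ended - obj.started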
class PlaybookSerializer(serializers.HyperlinkedModelSerializer, BaseSerializer, DurationSerializer):
class PlaybookSerializer(DurationSerializer):
class Meta:
model = models.Playbook
fields = '__all__'
plays = serializers.HyperlinkedRelatedField(
many=True,
view_name='play-detail',
read_only=True,
help_text='Plays associated to this playbook'
)
tasks = serializers.HyperlinkedRelatedField(
many=True,
view_name='task-detail',
read_only=True,
help_text='Tasks associated to this playbook'
)
# hosts = serializers.HyperlinkedRelatedField(
# many=True,
# read_only=True,
# view_name='hosts',
# help_text='Hosts associated to this playbook'
# )
# results = serializers.HyperlinkedRelatedField(
# many=True,
# read_only=True,
# view_name='results',
# help_text='Results associated to this playbook'
# )
# records = serializers.HyperlinkedRelatedField(
# many=True,
# read_only=True,
# view_name='records',
# help_text='Records associated to this playbook'
# )
files = serializers.HyperlinkedRelatedField(
many=True,
view_name='file-detail',
read_only=True,
help_text='Files associated to this playbook'
)
parameters = CompressedObjectField(
initial={},
help_text='A JSON dictionary containing Ansible command parameters'
)
path = serializers.CharField(help_text='Path to the playbook file')
ansible_version = serializers.CharField(
help_text='Version of Ansible used to run this playbook'
)
parameters = CompressedObjectField(
default=zlib.compress(json.dumps({}).encode('utf8')),
help_text='A JSON dictionary containing Ansible command parameters'
)
completed = serializers.BooleanField(
default=False,
help_text='If the completion of the execution has been acknowledged'
)
plays = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
tasks = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
files = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
hosts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
results = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
records = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class PlaySerializer(serializers.HyperlinkedModelSerializer, BaseSerializer, DurationSerializer):
class PlaySerializer(DurationSerializer):
class Meta:
model = models.Play
fields = '__all__'
playbook = serializers.HyperlinkedRelatedField(
view_name='playbook-detail',
read_only=True,
help_text='Playbook associated to this play'
)
tasks = serializers.HyperlinkedRelatedField(
many=True,
view_name='task-detail',
read_only=True,
help_text='Tasks associated to this play'
)
name = serializers.CharField(
help_text='Name of the play',
allow_blank=True,
allow_null=True,
)
# hosts = serializers.HyperlinkedRelatedField(
# many=True,
# view_name='host-detail',
# read_only=True,
# help_text='Hosts associated to this play'
#)
class TaskSerializer(serializers.HyperlinkedModelSerializer, BaseSerializer, DurationSerializer):
class TaskSerializer(DurationSerializer):
class Meta:
model = models.Task
fields = '__all__'
playbook = serializers.HyperlinkedRelatedField(
view_name='playbook-detail',
read_only=True,
help_text='Playbook associated to this task'
)
play = serializers.HyperlinkedRelatedField(
view_name='play-detail',
read_only=True,
help_text='Play associated to this task'
)
file = serializers.HyperlinkedRelatedField(
view_name='file-detail',
read_only=True,
help_text='File associated to this task'
)
# results = serializers.HyperlinkedRelatedField(
# many=True,
# view_name='result-detail',
# read_only=True,
# help_text='Results associated to this task'
# )
name = serializers.CharField(
help_text='Name of the task',
allow_blank=True,
allow_null=True
)
action = serializers.CharField(help_text='Action of the task')
lineno = serializers.IntegerField(
help_text='Line number in the file of the task'
)
tags = CompressedObjectField(
help_text='A JSON list containing Ansible tags',
initial=[],
default=[],
)
handler = serializers.BooleanField(
help_text='Whether or not this task was a handler',
initial=False,
default=False,
)
#
#
# class HostSerializer(BaseSerializer):
# class Meta:
# model = models.Host
# fields = '__all__'
#
#
# class ResultSerializer(BaseSerializer, DurationSerializer):
# class Meta:
# model = models.Result
# fields = '__all__'
# @property
# def derived_status(self):
# if self.status == self.OK and self.changed:
# return self.CHANGED
# elif self.status == self.FAILED and self.ignore_errors:
# return self.IGNORED
# elif self.status not in [
# self.OK, self.FAILED, self.SKIPPED, self.UNREACHABLE
# ]:
# return self.UNKNOWN
# else:
# return self.status
#
# class RecordSerializer(BaseSerializer):
# class Meta:
# model = models.Record
# fields = '__all__'
#
tags = CompressedObjectField(
default=zlib.compress(json.dumps([]).encode('utf8')),
help_text='A JSON list containing Ansible tags'
)
class FileContentSerializer(BaseSerializer):
@@ -284,19 +157,16 @@ class FileContentSerializer(BaseSerializer):
model = models.FileContent
fields = '__all__'
contents = CompressedTextField(help_text='Contents of the file')
sha1 = serializers.CharField(read_only=True, help_text='sha1 of the file')
contents = CompressedTextField(help_text='Contents of the file')
def create(self, validated_data):
sha1 = hashlib.sha1(validated_data['contents']).hexdigest()
validated_data['sha1'] = sha1
obj, created = models.FileContent.objects.get_or_create(
**validated_data
)
return obj
return models.FileContent.objects.create(**validated_data)
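
Note that sha1 is declared read_only above, so clients never supply it; it is derived from the contents. For reference, hashlib.sha1 works on bytes and hexdigest() yields the 40-character hex string stored in the column. A tiny sketch (the input bytes are illustrative, not necessarily what the serializer hashes after compression):

    import hashlib

    # 40 hexadecimal characters, e.g. for the raw file contents
    print(hashlib.sha1(b'# playbook').hexdigest())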
class FileSerializer(serializers.HyperlinkedModelSerializer, BaseSerializer):
class FileSerializer(BaseSerializer):
class Meta:
model = models.File
fields = '__all__'

View File

@@ -9,3 +9,12 @@ class PlaybookFactory(factory.DjangoModelFactory):
path = '/tmp/playbook.yml'
ansible_version = '2.4.0'
parameters = b'x\x9c\xabVJ\xcb\xcfW\xb2RPJJ,R\xaa\x05\x00 \x98\x04T'
class FileContentFactory(factory.DjangoModelFactory):
class Meta:
model = models.FileContent
sha1 = '1e58ead094c920fad631d2c22df34dc0314dab0c'
contents = b'x\x9cSV(\xc8I\xacL\xca\xcf\xcf\x06\x00\x11\xbd\x03\xa5'
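
The byte literals in these factories are pre-compressed fixtures and can be regenerated with the same json/zlib combination the serializers use. A quick sketch (the exact bytes depend on zlib defaults, so verify by decompressing rather than by comparing literals):

    import json
    import zlib

    # PlaybookFactory.parameters: a compressed JSON object
    print(zlib.compress(json.dumps({'foo': 'bar'}).encode('utf8')))

    # FileContentFactory.contents: compressed plain text
    print(zlib.compress('# playbook'.encode('utf8')))

    # Round-trip check against the parameters literal used above
    blob = b'x\x9c\xabVJ\xcb\xcfW\xb2RPJJ,R\xaa\x05\x00 \x98\x04T'
    print(json.loads(zlib.decompress(blob).decode('utf8')))  # {'foo': 'bar'}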

View File

@@ -0,0 +1,23 @@
from rest_framework.test import APITestCase
from api import serializers
from api.tests import factories
class FileContentTestCase(APITestCase):
def test_file_content_factory(self):
file_content = factories.FileContentFactory(sha1='413a2f16b8689267b7d0c2e10cdd19bf3e54208d')
self.assertEqual(file_content.sha1, '413a2f16b8689267b7d0c2e10cdd19bf3e54208d')
def test_file_content_serializer_compress_contents(self):
serializer = serializers.FileContentSerializer(data={'contents': '# playbook'})
serializer.is_valid()
file_content = serializer.save()
file_content.refresh_from_db()
self.assertEqual(file_content.sha1, '1e58ead094c920fad631d2c22df34dc0314dab0c')
self.assertEqual(file_content.contents, b'x\x9cSV(\xc8I\xacL\xca\xcf\xcf\x06\x00\x11\xbd\x03\xa5')
def test_file_content_serializer_decompress_contents(self):
file_content = factories.FileContentFactory(contents=b'x\x9cSV(\xc8I\xacL\xca\xcf\xcf\x06\x00\x11\xbd\x03\xa5')
serializer = serializers.FileContentSerializer(instance=file_content)
self.assertEqual(serializer.data['contents'], '# playbook')

View File

@@ -1,10 +1,48 @@
import datetime
from django.utils import timezone
from rest_framework.test import APITestCase
from rest_framework.test import APIRequestFactory
from rest_framework.request import Request
from api import models
from api import models, serializers
from api.tests import factories
class PlaybookTestCase(APITestCase):
def test_playbook_factory(self):
playbook = factories.PlaybookFactory(path='/tmp/playbook.yml', ansible_version='2.4.0')
self.assertEqual(playbook.path, '/tmp/playbook.yml')
self.assertEqual(playbook.ansible_version, '2.4.0')
def test_playbook_serializer(self):
serializer = serializers.PlaybookSerializer(data={
'path': '/tmp/playbook.yml',
'ansible_version': '2.4.0'
})
serializer.is_valid()
playbook = serializer.save()
playbook.refresh_from_db()
self.assertEqual(playbook.path, '/tmp/playbook.yml')
self.assertEqual(playbook.ansible_version, '2.4.0')
def test_playbook_serializer_compress_parameters(self):
serializer = serializers.PlaybookSerializer(data={
'path': '/tmp/playbook.yml',
'ansible_version': '2.4.0',
'parameters': {'foo': 'bar'}
})
serializer.is_valid()
playbook = serializer.save()
playbook.refresh_from_db()
self.assertEqual(playbook.parameters, b'x\x9c\xabVJ\xcb\xcfW\xb2RPJJ,R\xaa\x05\x00 \x98\x04T')
def test_playbook_serializer_decompress_parameters(self):
playbook = factories.PlaybookFactory(parameters=b'x\x9c\xabVJ\xcb\xcfW\xb2RPJJ,R\xaa\x05\x00 \x98\x04T')
serializer = serializers.PlaybookSerializer(instance=playbook, context={
'request': Request(APIRequestFactory().get('/')),
})
self.assertEqual(serializer.data['parameters'], {'foo': 'bar'})
def test_get_no_playbooks(self):
request = self.client.get('/api/v1/playbooks/')
self.assertEqual(0, len(request.data['results']))
@@ -43,3 +81,16 @@ class PlaybookTestCase(APITestCase):
self.assertEqual(200, request.status_code)
playbook_updated = models.Playbook.objects.get(id=playbook.id)
self.assertEqual('/home/ara/playbook.yml', playbook_updated.path)
def test_get_playbook(self):
playbook = factories.PlaybookFactory()
request = self.client.get('/api/v1/playbooks/%s/' % playbook.id)
self.assertEqual(playbook.path, request.data['path'])
self.assertEqual(playbook.ansible_version, request.data['ansible_version'])
def test_get_playbook_duration(self):
started = timezone.now()
ended = started + datetime.timedelta(hours=1)
playbook = factories.PlaybookFactory(started=started, ended=ended)
request = self.client.get('/api/v1/playbooks/%s/' % playbook.id)
self.assertEqual(request.data['duration'], datetime.timedelta(0, 3600))

View File

@@ -18,87 +18,83 @@
# Creates mock data offline leveraging the API
import django
import hashlib
import json
import os
import sys
from django.core import serializers
parent_directory = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(parent_directory)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ara.settings')
django.setup()
from api import models
from django.test import Client
def post(endpoint, data):
client = Client()
print("Posting to %s..." % endpoint)
obj = client.post(endpoint, data)
obj = client.post(endpoint,
json.dumps(data),
content_type="application/json")
print("HTTP %s" % obj.status_code)
print("Got: %s" % json.dumps(obj.json(), indent=2))
print("#" * 40)
return obj
return obj.json()
playbook = post(
'/api/v1/playbooks/',
dict(
started='2016-05-06T17:20:25.749489-04:00',
path='/tmp/playbook.yml',
ansible_version='2.3.4',
completed=False,
parameters=json.dumps(dict(
foo='bar'
))
)
{
'started': '2016-05-06T17:20:25.749489-04:00',
'path': '/tmp/playbook.yml',
'ansible_version': '2.3.4',
'completed': False,
'parameters': {'foo': 'bar'}
}
)
play = post(
'/api/v1/plays/',
dict(
started='2016-05-06T17:20:25.749489-04:00',
name='Test play',
playbook=playbook.json()['url']
)
{
'started': '2016-05-06T17:20:25.749489-04:00',
'name': 'Test play',
'playbook': playbook['id']
}
)
playbook_file = post(
'/api/v1/files/',
dict(
path=playbook.json()['path'],
{
'path': playbook['path'],
# TODO: Fix this somehow
content='# playbook',
playbook=playbook.json()['url'],
is_playbook=True
)
'content': '# playbook',
'playbook': playbook['id'],
'is_playbook': True
}
)
task_file = post(
'/api/v1/files/',
dict(
playbook=playbook.json()['url'],
path='/tmp/task.yml',
{
'playbook': playbook['id'],
'path': '/tmp/task.yml',
# TODO: Fix this somehow
content='# task',
is_playbook=True
)
'content': '# task',
'is_playbook': True
}
)
task = post(
'/api/v1/tasks/',
dict(
playbook=playbook.json()['url'],
play=play.json()['url'],
file=task_file.json()['url'],
name='Task name',
action='action',
lineno=1,
tags=json.dumps(['one', 'two']),
handler=False,
started='2016-05-06T17:20:25.749489-04:00'
)
{
'playbook': playbook['id'],
'play': play['id'],
'file': task_file['id'],
'name': 'Task name',
'action': 'action',
'lineno': 1,
'tags': ['one', 'two'],
'handler': False,
'started': '2016-05-06T17:20:25.749489-04:00'
}
)
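
A note on the change to post() above: Django's test Client form-encodes the body by default, which turns nested values such as the parameters dict into their string representation; sending a JSON body with an explicit content type keeps the structure intact all the way to the API. A minimal sketch of the pattern (the payload is illustrative):

    import json
    from django.test import Client

    client = Client()
    payload = {'path': '/tmp/playbook.yml', 'parameters': {'foo': 'bar'}}

    # client.post(url, payload) would be form-encoded and the nested
    # dict would not arrive as JSON; an explicit JSON body avoids that.
    response = client.post('/api/v1/playbooks/',
                           json.dumps(payload),
                           content_type='application/json')
    print(response.status_code, response.json())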