Nir Soffer has uploaded a new change for review.
Change subject: Fix tokenizing of unsupported data in vdsmapi-schema.json
......................................................................
Fix tokenizing of unsupported data in vdsmapi-schema.json
The vdsmapi tokenizer got into an endless loop when an unsupported token was
found. Example inputs triggering this are invalid tokens such as an unquoted
string, or valid JSON tokens like null, true and false, which are not
supported by the parser.
Now the tokenizer raises ValueError with the offending data. This should
be good enough for developer use.
New unit tests cover tokenizing valid, invalid and unsupported tokens.
Change-Id: I7d29686d3189fd9e64e26c56dcd9da370f855406
Signed-off-by: Nir Soffer <nsoffer(a)redhat.com>
---
M tests/schemaTests.py
M vdsm_api/vdsmapi.py
2 files changed, 70 insertions(+), 0 deletions(-)
git pull ssh://gerrit.ovirt.org:29418/vdsm refs/changes/97/19697/1
diff --git a/tests/schemaTests.py b/tests/schemaTests.py
index ae82b9f..9b709ae 100644
--- a/tests/schemaTests.py
+++ b/tests/schemaTests.py
@@ -22,5 +22,73 @@
class SchemaTest(TestCaseBase):
    """Exercise vdsmapi schema parsing and the tokenizer against
    supported, unsupported and invalid input."""

    def _tokens(self, data):
        # Drain the tokenizer generator into a list of tokens.
        return list(vdsmapi.tokenize(data))

    def _assertTokenizeRaises(self, data):
        # Fully consuming the tokenizer for this input must raise ValueError.
        self.assertRaises(ValueError, list, vdsmapi.tokenize(data))

    def testSchemaParse(self):
        # The bundled schema must load into a dict.
        self.assertTrue(isinstance(vdsmapi.get_api(), dict))

    ## Supported JSON syntax

    def testTokenizeEmpty(self):
        self.assertEqual(self._tokens(''), [])

    def testTokenizeString(self):
        self.assertEqual(self._tokens("'string'"), ['string'])

    def testTokenizeStringWithWhitespace(self):
        self.assertEqual(self._tokens("'s1 s2'"), ['s1 s2'])

    def testTokenizeStringEmpty(self):
        self.assertEqual(self._tokens("''"), [''])

    def testTokenizeArray(self):
        self.assertEqual(self._tokens("['i1', 'i2']"),
                         ['[', 'i1', ',', 'i2', ']'])

    def testTokenizeArrayEmpty(self):
        self.assertEqual(self._tokens("[]"), ['[', ']'])

    def testTokenizeObject(self):
        self.assertEqual(self._tokens("{'a': 'b', 'c': 'd'}"),
                         ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}'])

    def testTokenizeObjectEmpty(self):
        self.assertEqual(self._tokens("{}"), ['{', '}'])

    def testTokenizeMixed(self):
        self.assertEqual(self._tokens("{'a': {'b': ['c']}}"),
                         ['{', 'a', ':', '{', 'b', ':', '[', 'c', ']',
                          '}', '}'])

    def testTokenizeSkipWhitespaceBetweenTokens(self):
        self.assertEqual(self._tokens(" { 'a': \n 'b' , 'c'\n\n : 'd' } \n"),
                         ['{', 'a', ':', 'b', ',', 'c', ':', 'd', '}'])

    ## Unsupported JSON syntax

    def testTokenizeRaiseOnNumber(self):
        self._assertTokenizeRaises("1")

    def testTokenizeRaiseOnTrue(self):
        self._assertTokenizeRaises("true")

    def testTokenizeRaiseOnFalse(self):
        self._assertTokenizeRaises("false")

    def testTokenizeRaiseOnNull(self):
        self._assertTokenizeRaises("null")

    ## Invalid JSON

    def testTokenizeRaiseOnInvalidData(self):
        self._assertTokenizeRaises("{'a': invalid, 'b': 'c'}")
diff --git a/vdsm_api/vdsmapi.py b/vdsm_api/vdsmapi.py
index 4747dcb..db29c13 100644
--- a/vdsm_api/vdsmapi.py
+++ b/vdsm_api/vdsmapi.py
@@ -49,6 +49,8 @@
data = data[1:]
data = data[1:]
yield string
+ else:
+ raise ValueError('Invalid data: %r' % data)
def parse(tokens):
--
To view, visit
http://gerrit.ovirt.org/19697
To unsubscribe, visit
http://gerrit.ovirt.org/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I7d29686d3189fd9e64e26c56dcd9da370f855406
Gerrit-PatchSet: 1
Gerrit-Project: vdsm
Gerrit-Branch: master
Gerrit-Owner: Nir Soffer <nsoffer(a)redhat.com>