# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_encodeTrueConversion(self):
    """Round-trip a bool: ujson's output must parse identically to the
    stdlib json module and decode back to the original value."""
    data = True  # renamed from `input`, which shadows the builtin
    output = ujson.encode(data)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(data, json.loads(output))
    self.assertEqual(output, json.dumps(data))
    self.assertEqual(data, ujson.decode(output))
def test_decodeDictWithNoKey(self):
    """Decoding an object literal containing a bare value with no key
    must raise ValueError."""
    data = "{{{{31337}}}}"
    # assertRaises replaces the manual try/except/assert pattern; the old
    # trailing `assert False, "Wrong exception"` was unreachable dead code.
    self.assertRaises(ValueError, ujson.decode, data)
def test_encodeDoubleNegConversion(self):
    """Round-trip a negative double; compare to 5 decimal places since
    float serialization precision may differ between encoders."""
    data = -math.pi  # renamed from `input`, which shadows the builtin
    output = ujson.encode(data)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(round(data, 5), round(json.loads(output), 5))
    self.assertEqual(round(data, 5), round(ujson.decode(output), 5))
def test_decodeBrokenObjectStart(self):
    """Decoding a lone '{' (unterminated object) must raise ValueError."""
    data = "{"
    # assertRaises replaces the manual try/except/assert pattern; the old
    # trailing `assert False, "Wrong exception"` was unreachable dead code.
    self.assertRaises(ValueError, ujson.decode, data)
def test_decodeNullBroken(self):
    """Decoding a truncated 'null' literal ('n') must raise ValueError."""
    data = "n"
    # assertRaises replaces the manual try/except/assert pattern; the old
    # trailing `assert False, "Wrong exception"` was unreachable dead code.
    self.assertRaises(ValueError, ujson.decode, data)
def test_encodeIntNegConversion(self):
    """Round-trip a negative int: ujson's output must parse identically
    to the stdlib json module and decode back to the original value."""
    data = -31337  # renamed from `input`, which shadows the builtin
    output = ujson.encode(data)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(data, json.loads(output))
    self.assertEqual(output, json.dumps(data))
    self.assertEqual(data, ujson.decode(output))
# NOTE(review): this function appears truncated by the snippet extraction --
# the `try:` below has no matching except/finally in view, `bulk_data` is
# built but never returned, and indentation has been flattened. Code is kept
# byte-identical; only review comments are added.
def load_bulk_bills(bill_directory):
# Collect every file path under bill_directory via a recursive walk.
bill_files = []
for dirname, dirnames, filenames in os.walk(bill_directory):
for filename in filenames:
bill_files.append(os.path.join(dirname, filename))
bulk_data = []
for i, bill_file in enumerate(bill_files):
# NOTE(review): file handle is opened and never closed -- leak; also
# `ujson` and (below) `parser` are third-party -- confirm against imports.
data_dict = ujson.decode(open(bill_file).read())
bill_dict = {}
# Keep only records whose 'type' list mentions "bill" (case-insensitive).
bill_text_count = [1 for x in data_dict['type'] if "bill" in x.lower()]
if sum(bill_text_count) < 1:
continue
# Strip all whitespace from the bill id.
# NOTE(review): "\s+" should be a raw string r"\s+" (invalid-escape warning).
bill_id = re.sub("\s+", "", data_dict['bill_id'])
try:
if data_dict['versions'] == []:
bill_document_first = ""
bill_document_last = ""
else:
# First/last versions carry base64-encoded documents; text content is
# extracted via parser.from_buffer (Tika-style API -- confirm).
bill_document_first = base64.b64decode(data_dict['versions'][0]['bill_document'])
bill_document_first = parser.from_buffer(bill_document_first)['content']
bill_document_last = base64.b64decode(data_dict['versions'][-1]['bill_document'])
# NOTE(review): snippet ends here -- the except clause for the `try:` above
# and the accumulation/return of bulk_data are missing from this view.
def check_extractor(doc_path, func, state):
json_obj = ujson.decode(open(doc_path).read())
url = "{0}/{1}".format("http://static.openstates.org/documents/" + state, json_obj['versions'][0]['doc_id'])
doc = urllib2.urlopen(url).read()
extracted = func(doc)
print extracted
@permission_required("graphs.view_graph", (Graph, "slug", "graph_slug"),
                     return_403=True)
def graph_analytics_boxes_edit_position(request, graph_slug):
    """Persist the collapsible/position options for a graph's analytics
    boxes. Accepts only an AJAX POST (any POST when DEBUG is on); any
    other request raises Http404."""
    if not ((request.is_ajax() or settings.DEBUG) and request.POST):
        raise Http404(_("Error: Invalid request (expected an AJAX POST request)"))
    data = request.POST.copy()
    # The whole JSON payload arrives as the first (and only) POST key.
    params = next(iter(data), None)
    # NOTE(review): `json` here must be a module exposing .decode (e.g. an
    # ujson alias) -- the stdlib json module has no decode(); confirm imports.
    params = json.decode(params)
    graph = get_object_or_404(Graph, slug=graph_slug)
    with transaction.atomic():
        graph.set_option('collapsibles', params['collapsibles'])
        graph.set_option('positions', params['positions'])
        graph.save()
    return HttpResponse(status=200, content_type='application/json')
def get_json_loads():
    """Return the fastest available JSON-decoding callable, preferring
    ujson, then cjson, then the stdlib; the choice is cached in the
    module globals so the import probing runs at most once."""
    global _json_loads, _json_module_name
    if _json_loads is not None:
        return _json_loads
    # Timings reported for a fragment file with 37177 lines
    # (35634 "RECORD" and 1534 "IGNORE" records.)
    try:
        import ujson  # 40.05 seconds
        _json_loads, _json_module_name = ujson.decode, "ujson"
    except ImportError:
        try:
            import cjson  # 41.85 seconds
            _json_loads, _json_module_name = cjson.decode, "cjson"
        except ImportError:
            # 55.5 seconds
            _json_loads, _json_module_name = json.loads, "json"
    return _json_loads