Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Output locations for the pickled intermediate structures ---
original_query_list_file = os.path.join(output_location, "original_query_list.pkl")
expected_result_list_file = os.path.join(output_location, "expected_result_list.pkl")
originalquery_exresult_file = os.path.join(output_location, "originalquery_exresult.pkl")
queries_returned_list_file = os.path.join(output_location, "queries_returned_list.pkl")
original_query_puc_list_file = os.path.join(output_location, "original_query_puc_list.pkl")
url_query_map_file = os.path.join(output_location, "url_query_map.pkl")

# --- In-memory accumulators filled while processing the query records ---
original_query_list = []  # Simple list of query strings
expected_result_list = []  # List of lists of expected results
originalquery_exresult = {}  # Query -> expected result list (URLs)
queries_returned_list = []  # All queries returned by the Big Machine
original_query_puc_list = {}  # Original query -> expected list of URLs
url_query_map = {}  # PUC URL (page) -> list of queries leading to that page

# msgpack is a binary format: the stream must be opened in binary mode
# ('rb'); text mode ('r') corrupts the data on Python 3.
unpacker = msgpack.Unpacker(open(query_data_filename, 'rb'))
numJobs = 5
def getQueryParameters(query_record):
    """Extract the original query and its expected-result URLs from one record.

    Appends the UTF-8-encoded query string to the module-level
    ``original_query_list`` and collects the record's expected result URLs
    into a local list.

    NOTE(review): this view of the function appears truncated — nothing
    visible here returns or stores ``indiv_expected_result``; confirm
    against the full file.
    """
    # Round-trip through JSON to normalize the record into plain
    # dict/list/str types.
    record_json_dump = json.dumps(query_record)
    single_record = json.loads(record_json_dump)
    # Original Query
    query_original = single_record["query"].encode("utf-8")
    # print "[original_query]\t"+str(query_original)
    original_query_list.append(query_original)
    # Expected result returned by the system.
    expected_result = single_record["ex"]
    # NOTE(review): never incremented in this view; only referenced by the
    # commented-out print below.
    ex_res_count = 0
    indiv_expected_result = []
    for ex in expected_result:
        # print "[expected_result]("+str(ex_res_count)+")\t"+str(ex.encode("utf-8"))
        indiv_expected_result.append(ex.encode("utf-8"))
def unpack_gen(file, size):
    """Yield msgpack objects decoded from *file*, read *size* bytes at a time.

    Chunks are fed into a streaming Unpacker so objects spanning chunk
    boundaries are decoded correctly; iteration stops at end of file.
    """
    unpacker = Unpacker()
    while True:
        chunk = file.read(size)
        if not chunk:
            return
        unpacker.feed(chunk)
        for obj in unpacker:
            yield obj
def properties(self, *props):
    """Return the requested document properties.

    Parses the document up to the property section, positions the raw-data
    stream at the stored property offset, and decodes the named properties
    with ``MsgpackCodec``.
    """
    self._parse_state(1)
    prop_offset = self._prop[0]
    self.rawdata.seek(prop_offset)
    decoder = msgpack.Unpacker(self.rawdata, raw=False)
    return MsgpackCodec.decode_property(decoder, *props)
def _read_msgpack_file(self, file_like, **kwargs):
    """Yield validated records decoded from *file_like*.

    NOTE: the current implementation does not tolerate any unpack error —
    a corrupt stream raises straight out of the iterator.
    """
    for rec in msgpack.Unpacker(file_like):
        self._validate_record(rec)
        yield rec
use_single_float=False,
autoreset=True,
use_bin_type=True,
strict_types=False,
datetime=False,
unicode_errors=None):
default = functools.partial(encode, chain=default)
super(Packer, self).__init__(default=default,
use_single_float=use_single_float,
autoreset=autoreset,
use_bin_type=use_bin_type,
strict_types=strict_types,
datetime=datetime,
unicode_errors=unicode_errors)
class Unpacker(_Unpacker):
def __init__(self,
file_like=None,
read_size=0,
use_list=True,
raw=False,
timestamp=0,
strict_map_key=True,
object_hook=None,
object_pairs_hook=None,
list_hook=None,
unicode_errors=None,
max_buffer_size=100 * 1024 * 1024,
ext_hook=msgpack.ExtType,
max_str_len=-1,
max_bin_len=-1,
max_array_len=-1,
def get_iterator(self, index: int) -> Iterable[Tuple[bytes, Optional[bytes]]]:
    """Yield (key, value) pairs from the size-prefixed msgpack log at *index*.

    The log section starts with a uint32 byte count; the payload is streamed
    through the unpacker in bounded chunks so the whole section is never
    held in memory at once.
    """
    self._seek_to_log_start_offset(index)
    remaining: int = self._read_uint32()
    unpacker = msgpack.Unpacker(use_list=False, raw=True)
    chunk_limit = 16384
    while remaining > 0:
        to_read = min(remaining, chunk_limit)
        chunk: bytes = self._fp.read(to_read)
        self._check_bytes_data(chunk, to_read)
        remaining -= to_read
        unpacker.feed(chunk)
        for key, value in unpacker:
            yield key, value
# --- Per-map HTML rendering (fragment of a larger loop; the enclosing
# function is not visible here — confirm indentation level against the
# full file) ---

# Mapper credit line: link each mapper name to their website, or leave blank
# when no mapper is recorded.
if not mapperName:
    mbMapperName = ""
else:
    names = splitMappers(mapperName)
    newNames = []
    for name in names:
        newNames.append('<a href="%s">%s</a>' % (mapperWebsite(name), escape(name)))
    mbMapperName = "<strong>by %s</strong><br>" % makeAndString(newNames)
formattedMapName = escape(originalMapName)
mbMapInfo = ""
try:
    # Width, height, and tile usage are stored as three consecutive
    # msgpack values in the per-map file.
    with open('maps/%s.msgpack' % originalMapName, 'rb') as inp:
        unpacker = msgpack.Unpacker(inp)
        width = unpacker.unpack()
        height = unpacker.unpack()
        tiles = unpacker.unpack()
        formattedMapName = '<span title="%dx%d">%s</span>' % (width, height, escape(originalMapName))
        mbMapInfo = "<br>"
        for tile in sorted(tiles.keys(), key=lambda i:order(i)):
            # NOTE(review): the img src uses description(tile) while alt uses
            # the raw tile id — verify the icon filename really is the
            # description and not the tile id.
            mbMapInfo += '<span title="%s"><img height="32" width="32" src="/tiles/%s.png" alt="%s"></span> ' % (description(tile), description(tile), tile)
except IOError:
    # Missing map file: render the entry without dimensions/tile info.
    pass
mapsString += u'<div id="map-%s" class="block3 info"><h3 class="inline">%s</h3><p class="inline">%s</p><p>Difficulty: %s, Points: %d<br><a href="/maps/?map=%s"><img height="225" width="360" src="/ranks/maps/%s.png" alt="Screenshot" class="screenshot"></a>%s<br><span title="%s">%d tee%s finished%s</span><br>%d team%s finished%s</p></div>\n' % (escape(mapName), formattedMapName, mbMapperName, escape(renderStars(stars)), globalPoints(type, stars), quote_plus(originalMapName), escape(mapName), mbMapInfo, finishTimes, countFinishes, mbS2, escape(avgTime), countTeamFinishes, mbS, escape(biggestTeam))
mapsString += printTeamRecords2("Team Records", "teamrecords", teamRanks)
mapsString += '<br>\n'
def retrieve_from_file(path):
    """
    Retrieve encoded objects from a file.

    Args:
        path (str): file to read from

    Returns:
        Object[]: list of objects
    """
    # Use a context manager so the handle is closed even if decoding raises
    # (the original leaked the file on error). msgpack data is binary,
    # hence mode "rb".
    with open(path, "rb") as f:
        unp = msgpack.Unpacker(f, raw=False)
        return [encoding.msgpack_decode(obj) for obj in unp]
def __init__(self, sck):
    """Wrap an already-open socket with a streaming msgpack decoder.

    Args:
        sck: socket this object reads msgpack data from.
    """
    self._sck = sck
    # Incremental decoder; fed with received bytes elsewhere in the class.
    self._unpacker = msgpack.Unpacker()