# Upload the 7z archive named on the command line to the Windows build
# bucket, then flip its ACL so the artifact is publicly downloadable.
session = boto3.session.Session()
s3 = session.resource('s3')
bucket_name = 'ossci-windows-build'
key = 'pytorch/' + IMAGE_COMMIT_TAG + '.7z'
with open(sys.argv[1], 'rb') as data:
    s3.Bucket(bucket_name).put_object(Key=key, Body=data)
object_acl = s3.ObjectAcl(bucket_name, key)
response = object_acl.put(ACL='public-read')
results = ([], [], []) # match, mismatch, error
for x in names:
try:
- ax = open(os.path.join(a, x), 'r').read().replace('\r\n', '\n').replace('\r', '\n')
- bx = open(os.path.join(b, x), 'r').read().replace('\r\n', '\n').replace('\r', '\n')
+ with open(os.path.join(a, x)) as f:
+ ax = f.read().replace('\r\n', '\n').replace('\r', '\n')
+ with open(os.path.join(b, x)) as f:
+ bx = f.read().replace('\r\n', '\n').replace('\r', '\n')
if ax == bx:
results[0].append(x)
else:
cPickle.dump(data, fo, protocol=cPickle.HIGHEST_PROTOCOL)
def unpickle(filename):
    """Load and return the object pickled in *filename*.

    The file is opened in binary mode: pickle streams are byte data,
    not text, and text mode would corrupt them via newline translation
    (and fail outright on Python 3).
    """
    with open(filename, 'rb') as fo:
        return cPickle.load(fo)
def partition_list(l, partition_size):
divup = lambda a,b: (a + b - 1) / b
if type(filename) == str:
fo = open(filename, "w")
- cPickle.dump(data, fo, protocol=cPickle.HIGHEST_PROTOCOL)
- fo.close()
+ with fo:
+ cPickle.dump(data, fo, protocol=cPickle.HIGHEST_PROTOCOL)
def unpickle(filename):
    """Read *filename* in 1GB chunks and return the unpickled object.

    Chunked reading avoids the historical overflow in a single huge
    read; the chunks are accumulated in an in-memory buffer and
    unpickled in one shot.

    Raises:
        UnpickleError: if *filename* does not exist.
    """
    if not os.path.exists(filename):
        raise UnpickleError("Path '%s' does not exist." % filename)
    # Binary mode: pickle streams are bytes, and the fstat/tell size
    # bookkeeping below assumes no newline translation.
    with open(filename, 'rb') as fo:
        buf = StringIO()
        file_size = os.fstat(fo.fileno()).st_size
        # Read 1GB at a time to avoid overflow
        while fo.tell() < file_size:
            buf.write(fo.read(1 << 30))
    # NOTE(review): the buffer is deliberately NOT opened in the `with`
    # header — Python 2's StringIO.StringIO/cStringIO objects are not
    # context managers, so `with StringIO() as z` would raise
    # AttributeError. Confirm which StringIO this module imports.
    return cPickle.loads(buf.getvalue())
def is_intel_machine():
    """Return True iff /proc/cpuinfo reports a 'GenuineIntel' vendor_id."""
    vendor_re = re.compile(r'^vendor_id\s+: (\S+)')
    with open('/proc/cpuinfo') as cpuinfo:
        for entry in cpuinfo:
            match = vendor_re.match(entry)
            if match is None:
                continue
            # First vendor_id line decides the answer.
            return match.group(1) == 'GenuineIntel'
    return False
# Returns the CPUs associated with a given GPU
if line.startswith('Bus Location'):
bus_id = line.split(':', 1)[1].strip()
bus_id = bus_id[:7] + ':' + bus_id[8:]
- ff = open('/sys/module/nvidia/drivers/pci:nvidia/%s/local_cpulist' % bus_id)
- cpus_str = ff.readline()
- ff.close()
+ with open('/sys/module/nvidia/drivers/pci:nvidia/%s/local_cpulist' % bus_id) as ff:
+ cpus_str = ff.readline()
cpus = [cpu for s in cpus_str.split(',') for cpu in range(int(s.split('-')[0]),int(s.split('-')[1])+1)]
return cpus
return [-1]
filename = "embedding_lookup_fused_8bit_rowwise_avx2.cc"
else:
filename = "embedding_lookup_avx2.cc"
-fout = open(filename, "w")
options = [
["int32_t", "int32_t", "float", "float", "float", "float"],
code.append("} // namespace caffe2")
-for c in code:
- # print(c, file = fout)
- fout.write(c + "\n")
-fout.close()
+with open(filename, "w") as fout:
+ for c in code:
+ # print(c, file = fout)
+ fout.write(c + "\n")
print("Created " + filename)
output_init_net = args.init_net
output_predict_net = args.predict_net
- text_format.Merge(
- open(input_proto, 'r').read(), caffenet
- )
- caffenet_pretrained.ParseFromString(
- open(input_caffemodel, 'rb').read()
- )
+ with open(input_proto) as f:
+ text_format.Merge(f.read(), caffenet)
+ with open(input_caffemodel, 'rb') as f:
+ caffenet_pretrained.ParseFromString(f.read())
net, pretrained_params = TranslateModel(
caffenet, caffenet_pretrained, is_test=True,
remove_legacy_pad=args.remove_legacy_pad,
# We will do all the computation stuff in the global space.
caffenet = caffe_pb2.NetParameter()
caffenet_pretrained = caffe_pb2.NetParameter()
- text_format.Merge(
- open('data/testdata/caffe_translator/deploy.prototxt').read(), caffenet
- )
- caffenet_pretrained.ParseFromString(
- open(
- 'data/testdata/caffe_translator/bvlc_reference_caffenet.caffemodel')
- .read()
- )
+ with open('data/testdata/caffe_translator/deploy.prototxt') as f:
+ text_format.Merge(f.read(), caffenet)
+ with open('data/testdata/caffe_translator/'
+ 'bvlc_reference_caffenet.caffemodel') as f:
+ caffenet_pretrained.ParseFromString(f.read())
for remove_legacy_pad in [True, False]:
net, pretrained_params = caffe_translator.TranslateModel(
caffenet, caffenet_pretrained, is_test=True,
def ConvertProtoToBinary(proto_class, filename, out_filename):
    """Convert a text file of the given protobuf class to binary.

    Reads the text-format proto from *filename*, parses it with
    TryReadProtoWithClass, and writes the wire-format serialization to
    *out_filename*.
    """
    with open(filename) as f:
        proto = TryReadProtoWithClass(proto_class, f.read())
    # SerializeToString() returns raw bytes; the output must be opened
    # in binary mode or the write fails on Python 3 (and newline
    # translation corrupts the stream on Windows under Python 2).
    with open(out_filename, 'wb') as fid:
        fid.write(proto.SerializeToString())
os.mkdir(os.path.join(tmp_dir, 'test_dir'))
if backend == 'mpi':
# test mpiexec for --noprefix option
- devnull = open(os.devnull, 'w')
- noprefix_opt = '--noprefix' if subprocess.call(
- 'mpiexec -n 1 --noprefix bash -c ""', shell=True,
- stdout=devnull, stderr=subprocess.STDOUT) == 0 else ''
+ with open(os.devnull, 'w') as devnull:
+ noprefix_opt = '--noprefix' if subprocess.call(
+ 'mpiexec -n 1 --noprefix bash -c ""', shell=True,
+ stdout=devnull, stderr=subprocess.STDOUT) == 0 else ''
mpiexec = ['mpiexec', '-n', '3', noprefix_opt] + executable