
print() is a function in Python 3 (#4754)

cclauss, 7 years ago
Parent
Commit e9063ccf19
3 changed files with 31 additions and 27 deletions
  1. benchmarks/python/py_benchmark.py (+9 -8)
  2. benchmarks/util/big_query_utils.py (+14 -13)
  3. benchmarks/util/run_and_upload.py (+8 -6)

benchmarks/python/py_benchmark.py (+9 -8)

@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import os
 import timeit
@@ -138,15 +139,15 @@ if __name__ == "__main__":
     results.append(run_one_test(file))
 
   if args.json != "no":
-    print json.dumps(results)
+    print(json.dumps(results))
   else:
     for result in results:
-      print "Message %s of dataset file %s" % \
-          (result["message_name"], result["filename"])
-      print "Average time for parse_from_benchmark: %.2f ns" % \
+      print("Message %s of dataset file %s" % \
+          (result["message_name"], result["filename"]))
+      print("Average time for parse_from_benchmark: %.2f ns" % \
           (result["benchmarks"][ \
-                      args.behavior_prefix + "_parse_from_benchmark"])
-      print "Average time for serialize_to_benchmark: %.2f ns" % \
+                      args.behavior_prefix + "_parse_from_benchmark"]))
+      print("Average time for serialize_to_benchmark: %.2f ns" % \
           (result["benchmarks"][ \
-                      args.behavior_prefix + "_serialize_to_benchmark"])
-      print ""
+                      args.behavior_prefix + "_serialize_to_benchmark"]))
+      print("")
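
For context (an illustration, not code from this commit): from __future__ import print_function makes print() behave as the Python 3 built-in function even when the benchmark runs under Python 2, where print is otherwise a statement. A minimal sketch of the difference:

    from __future__ import print_function

    # Without the __future__ import, Python 2 parses print("a", "b") as the
    # print statement applied to a tuple and outputs ('a', 'b'); with the
    # import, Python 2 and Python 3 both call the print() function and
    # output: a b
    print("a", "b")

    # Keyword arguments such as end= and sep= exist only for the function form.
    print("no trailing newline", end="")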

benchmarks/util/big_query_utils.py (+14 -13)

@@ -1,5 +1,6 @@
 #!/usr/bin/env python2.7
 
+from __future__ import print_function
 import argparse
 import json
 import uuid
@@ -37,11 +38,11 @@ def create_dataset(biq_query, project_id, dataset_id):
         dataset_req.execute(num_retries=NUM_RETRIES)
     except HttpError as http_error:
         if http_error.resp.status == 409:
-            print 'Warning: The dataset %s already exists' % dataset_id
+            print('Warning: The dataset %s already exists' % dataset_id)
         else:
             # Note: For more debugging info, print "http_error.content"
-            print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
-                                                              http_error)
+            print('Error in creating dataset: %s. Err: %s' % (dataset_id,
+                                                              http_error))
             is_success = False
     return is_success
 
@@ -109,13 +110,13 @@ def create_table2(big_query,
         table_req = big_query.tables().insert(
             projectId=project_id, datasetId=dataset_id, body=body)
         res = table_req.execute(num_retries=NUM_RETRIES)
-        print 'Successfully created %s "%s"' % (res['kind'], res['id'])
+        print('Successfully created %s "%s"' % (res['kind'], res['id']))
     except HttpError as http_error:
         if http_error.resp.status == 409:
-            print 'Warning: Table %s already exists' % table_id
+            print('Warning: Table %s already exists' % table_id)
         else:
-            print 'Error in creating table: %s. Err: %s' % (table_id,
-                                                            http_error)
+            print('Error in creating table: %s. Err: %s' % (table_id,
+                                                            http_error))
             is_success = False
     return is_success
 
@@ -141,9 +142,9 @@ def patch_table(big_query, project_id, dataset_id, table_id, fields_schema):
             tableId=table_id,
             body=body)
         res = table_req.execute(num_retries=NUM_RETRIES)
-        print 'Successfully patched %s "%s"' % (res['kind'], res['id'])
+        print('Successfully patched %s "%s"' % (res['kind'], res['id']))
     except HttpError as http_error:
-        print 'Error in creating table: %s. Err: %s' % (table_id, http_error)
+        print('Error in creating table: %s. Err: %s' % (table_id, http_error))
         is_success = False
     return is_success
 
@@ -159,10 +160,10 @@ def insert_rows(big_query, project_id, dataset_id, table_id, rows_list):
             body=body)
         res = insert_req.execute(num_retries=NUM_RETRIES)
         if res.get('insertErrors', None):
-            print 'Error inserting rows! Response: %s' % res
+            print('Error inserting rows! Response: %s' % res)
             is_success = False
     except HttpError as http_error:
-        print 'Error inserting rows to the table %s' % table_id
+        print('Error inserting rows to the table %s' % table_id)
         is_success = False
 
     return is_success
@@ -176,8 +177,8 @@ def sync_query_job(big_query, project_id, query, timeout=5000):
             projectId=project_id,
             body=query_data).execute(num_retries=NUM_RETRIES)
     except HttpError as http_error:
-        print 'Query execute job failed with error: %s' % http_error
-        print http_error.content
+        print('Query execute job failed with error: %s' % http_error)
+        print(http_error.content)
     return query_job
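
A side note on the function form used throughout big_query_utils.py above (a sketch, not part of this change): print() accepts a file= keyword, so diagnostics like these warnings and errors could be routed to stderr instead of stdout, which the Python 2 print statement can only express with the print >>stream syntax. The report() helper and the sample values below are hypothetical:

    from __future__ import print_function
    import sys

    def report(message, is_error=False):
        # Hypothetical helper: send errors to stderr and informational
        # messages to stdout via the file= keyword of print().
        stream = sys.stderr if is_error else sys.stdout
        print(message, file=stream)

    report('Warning: The dataset %s already exists' % 'example_dataset')
    report('Error inserting rows! Response: %s' % {'insertErrors': []}, is_error=True)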

benchmarks/util/run_and_upload.py (+8 -6)

@@ -1,3 +1,5 @@
+from __future__ import print_function
+from __future__ import absolute_import
 import argparse
 import os
 import re
@@ -5,7 +7,7 @@ import copy
 import uuid
 import calendar
 import time
-import big_query_utils
+from . import big_query_utils
 import datetime
 import json
 # This import depends on the automake rule protoc_middleman, please make sure
@@ -255,7 +257,7 @@ def upload_result(result_list, metadata):
     if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
                                        _TABLE + "$" + _NOW,
                                        [row]):
-      print 'Error when uploading result', new_result
+      print('Error when uploading result', new_result)
 
 
 if __name__ == "__main__":
@@ -280,11 +282,11 @@ if __name__ == "__main__":
   parse_go_result(args.go_input_file)
 
   metadata = get_metadata()
-  print "uploading cpp results..."
+  print("uploading cpp results...")
   upload_result(cpp_result, metadata)
-  print "uploading java results..."
+  print("uploading java results...")
   upload_result(java_result, metadata)
-  print "uploading python results..."
+  print("uploading python results...")
   upload_result(python_result, metadata)
-  print "uploading go results..."
+  print("uploading go results...")
   upload_result(go_result, metadata)
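
The import change in run_and_upload.py pairs with from __future__ import absolute_import: once absolute imports are in effect, a bare import big_query_utils is resolved against sys.path only, so the sibling module in benchmarks/util is imported explicitly with from . import big_query_utils. A relative import only works when the module is loaded as part of a package; the snippet below is a hedged sketch of a fallback pattern sometimes used when a script may also be executed directly (it is not what this commit does, and it assumes big_query_utils.py sits next to the script on sys.path):

    from __future__ import absolute_import, print_function

    try:
        # Preferred path: the module is imported as part of its package
        # (assuming the directory is importable as a package), so the
        # sibling module resolves relative to it.
        from . import big_query_utils
    except (ImportError, ValueError):
        # Fallback for direct script execution: Python 3 raises ImportError
        # for a relative import with no parent package, Python 2 raises
        # ValueError, so fall back to a plain sys.path import.
        import big_query_utils

    print(big_query_utils.__name__)  # sanity check: importable either way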