Skip to content

Commit c2eed6e

Browse files
committed
Add detailed logging (formatted basicConfig, progress prints) across the test suite.
1 parent 2748953 commit c2eed6e

File tree

6 files changed

+80
-46
lines changed

6 files changed

+80
-46
lines changed

testing/test_awswrangler/test_pandas.py

Lines changed: 38 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@
77

88
from awswrangler import Session
99

10-
logging.basicConfig(level=logging.INFO)
10+
logging.basicConfig(
11+
level=logging.INFO,
12+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
1113
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
1214

1315

@@ -64,72 +66,72 @@ def test_read_csv(session, bucket):
6466
("overwrite", "csv", True, [], 1, 1),
6567
("append", "csv", True, [], 1, 2),
6668
("overwrite_partitions", "csv", True, [], 1, 1),
67-
("overwrite", "csv", False, [], 10, 1),
68-
("append", "csv", False, [], 10, 2),
69-
("overwrite_partitions", "csv", False, [], 10, 1),
70-
("overwrite", "csv", True, [], 10, 1),
71-
("append", "csv", True, [], 10, 2),
72-
("overwrite_partitions", "csv", True, [], 10, 1),
69+
("overwrite", "csv", False, [], 5, 1),
70+
("append", "csv", False, [], 5, 2),
71+
("overwrite_partitions", "csv", False, [], 5, 1),
72+
("overwrite", "csv", True, [], 5, 1),
73+
("append", "csv", True, [], 5, 2),
74+
("overwrite_partitions", "csv", True, [], 5, 1),
7375
("overwrite", "csv", False, ["date"], 1, 1),
7476
("append", "csv", False, ["date"], 1, 2),
7577
("overwrite_partitions", "csv", False, ["date"], 1, 1),
7678
("overwrite", "csv", True, ["date"], 1, 1),
7779
("append", "csv", True, ["date"], 1, 2),
7880
("overwrite_partitions", "csv", True, ["date"], 1, 1),
79-
("overwrite", "csv", False, ["date"], 10, 1),
80-
("append", "csv", False, ["date"], 10, 2),
81-
("overwrite_partitions", "csv", False, ["date"], 10, 1),
82-
("overwrite", "csv", True, ["date"], 10, 1),
83-
("append", "csv", True, ["date"], 10, 2),
84-
("overwrite_partitions", "csv", True, ["date"], 10, 1),
81+
("overwrite", "csv", False, ["date"], 5, 1),
82+
("append", "csv", False, ["date"], 5, 2),
83+
("overwrite_partitions", "csv", False, ["date"], 5, 1),
84+
("overwrite", "csv", True, ["date"], 5, 1),
85+
("append", "csv", True, ["date"], 5, 2),
86+
("overwrite_partitions", "csv", True, ["date"], 5, 1),
8587
("overwrite", "csv", False, ["name", "date"], 1, 1),
8688
("append", "csv", False, ["name", "date"], 1, 2),
8789
("overwrite_partitions", "csv", False, ["name", "date"], 1, 1),
8890
("overwrite", "csv", True, ["name", "date"], 1, 1),
8991
("append", "csv", True, ["name", "date"], 1, 2),
9092
("overwrite_partitions", "csv", True, ["name", "date"], 1, 1),
91-
("overwrite", "csv", False, ["name", "date"], 10, 1),
92-
("append", "csv", False, ["name", "date"], 10, 2),
93-
("overwrite_partitions", "csv", False, ["name", "date"], 10, 1),
94-
("overwrite", "csv", True, ["name", "date"], 10, 1),
95-
("append", "csv", True, ["name", "date"], 10, 2),
93+
("overwrite", "csv", False, ["name", "date"], 5, 1),
94+
("append", "csv", False, ["name", "date"], 5, 2),
95+
("overwrite_partitions", "csv", False, ["name", "date"], 5, 1),
96+
("overwrite", "csv", True, ["name", "date"], 5, 1),
97+
("append", "csv", True, ["name", "date"], 5, 2),
9698
("overwrite_partitions", "csv", True, ["name", "date"], 2, 1),
9799
("overwrite", "parquet", False, [], 1, 1),
98100
("append", "parquet", False, [], 1, 2),
99101
("overwrite_partitions", "parquet", False, [], 1, 1),
100102
("overwrite", "parquet", True, [], 1, 1),
101103
("append", "parquet", True, [], 1, 2),
102104
("overwrite_partitions", "parquet", True, [], 1, 1),
103-
("overwrite", "parquet", False, [], 10, 1),
104-
("append", "parquet", False, [], 10, 2),
105-
("overwrite_partitions", "parquet", False, [], 10, 1),
106-
("overwrite", "parquet", True, [], 10, 1),
107-
("append", "parquet", True, [], 10, 2),
108-
("overwrite_partitions", "parquet", True, [], 10, 1),
105+
("overwrite", "parquet", False, [], 5, 1),
106+
("append", "parquet", False, [], 5, 2),
107+
("overwrite_partitions", "parquet", False, [], 5, 1),
108+
("overwrite", "parquet", True, [], 5, 1),
109+
("append", "parquet", True, [], 5, 2),
110+
("overwrite_partitions", "parquet", True, [], 5, 1),
109111
("overwrite", "parquet", False, ["date"], 1, 1),
110112
("append", "parquet", False, ["date"], 1, 2),
111113
("overwrite_partitions", "parquet", False, ["date"], 1, 1),
112114
("overwrite", "parquet", True, ["date"], 1, 1),
113115
("append", "parquet", True, ["date"], 1, 2),
114116
("overwrite_partitions", "parquet", True, ["date"], 1, 1),
115-
("overwrite", "parquet", False, ["date"], 10, 1),
116-
("append", "parquet", False, ["date"], 10, 2),
117-
("overwrite_partitions", "parquet", False, ["date"], 10, 1),
118-
("overwrite", "parquet", True, ["date"], 10, 1),
119-
("append", "parquet", True, ["date"], 10, 2),
120-
("overwrite_partitions", "parquet", True, ["date"], 10, 1),
117+
("overwrite", "parquet", False, ["date"], 5, 1),
118+
("append", "parquet", False, ["date"], 5, 2),
119+
("overwrite_partitions", "parquet", False, ["date"], 5, 1),
120+
("overwrite", "parquet", True, ["date"], 5, 1),
121+
("append", "parquet", True, ["date"], 5, 2),
122+
("overwrite_partitions", "parquet", True, ["date"], 5, 1),
121123
("overwrite", "parquet", False, ["name", "date"], 1, 1),
122124
("append", "parquet", False, ["name", "date"], 1, 2),
123125
("overwrite_partitions", "parquet", False, ["name", "date"], 1, 1),
124126
("overwrite", "parquet", True, ["name", "date"], 1, 1),
125127
("append", "parquet", True, ["name", "date"], 1, 2),
126128
("overwrite_partitions", "parquet", True, ["name", "date"], 1, 1),
127-
("overwrite", "parquet", False, ["name", "date"], 10, 1),
128-
("append", "parquet", False, ["name", "date"], 10, 2),
129-
("overwrite_partitions", "parquet", False, ["name", "date"], 10, 1),
130-
("overwrite", "parquet", True, ["name", "date"], 10, 1),
131-
("append", "parquet", True, ["name", "date"], 10, 2),
132-
("overwrite_partitions", "parquet", True, ["name", "date"], 10, 1),
129+
("overwrite", "parquet", False, ["name", "date"], 5, 1),
130+
("append", "parquet", False, ["name", "date"], 5, 2),
131+
("overwrite_partitions", "parquet", False, ["name", "date"], 5, 1),
132+
("overwrite", "parquet", True, ["name", "date"], 5, 1),
133+
("append", "parquet", True, ["name", "date"], 5, 2),
134+
("overwrite_partitions", "parquet", True, ["name", "date"], 5, 1),
133135
],
134136
)
135137
def test_to_s3(

testing/test_awswrangler/test_redshift.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@
88

99
from awswrangler import Session, Redshift
1010

11-
logging.basicConfig(level=logging.INFO)
11+
logging.basicConfig(
12+
level=logging.INFO,
13+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
1214
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
1315

1416

testing/test_awswrangler/test_s3.py

Lines changed: 26 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@
77

88
from awswrangler import Session
99

10-
logging.basicConfig(level=logging.INFO)
10+
logging.basicConfig(
11+
level=logging.INFO,
12+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
1113
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
1214

1315

@@ -43,12 +45,13 @@ def write_fake_objects(bucket, path, num):
4345
if num < 10:
4446
wrt_fake_objs_batch(bucket, path, 0, num - 1)
4547
return
46-
cpus = mp.cpu_count() * 4
48+
cpus = mp.cpu_count()
4749
bounders = calc_bounders(num, cpus)
4850
args = []
4951
for item in bounders:
5052
args.append((bucket, path, item[0], item[1]))
5153
pool = mp.Pool(processes=cpus)
54+
print("Starting parallel writes...")
5255
pool.map(wrt_fake_objs_batch_wrapper, args)
5356

5457

@@ -78,20 +81,27 @@ def bucket(session, cloudformation_outputs):
7881
session.s3.delete_objects(path=f"s3://{bucket}/")
7982

8083

81-
@pytest.mark.parametrize("objects_num", [1, 10, 1001])
84+
@pytest.mark.parametrize("objects_num", [1, 10, 1001, 2001, 3001])
8285
def test_delete_objects(session, bucket, objects_num):
86+
print("Starting writes...")
8387
write_fake_objects(bucket, f"objs-{objects_num}/", objects_num)
88+
print("Starting deletes...")
8489
session.s3.delete_objects(path=f"s3://{bucket}/objs-{objects_num}/")
8590

8691

87-
@pytest.mark.parametrize("objects_num", [1, 10, 1001])
92+
@pytest.mark.parametrize("objects_num", [1, 10, 1001, 2001, 3001])
8893
def test_delete_listed_objects(session, bucket, objects_num):
8994
path = f"s3://{bucket}/objs-listed-{objects_num}/"
95+
print("Starting deletes...")
9096
session.s3.delete_objects(path=path)
97+
print("Starting writes...")
9198
write_fake_objects(bucket, f"objs-listed-{objects_num}/", objects_num)
99+
print("Starting list...")
92100
objects_paths = session.s3.list_objects(path=path)
93101
assert len(objects_paths) == objects_num
102+
print("Starting listed deletes...")
94103
session.s3.delete_listed_objects(objects_paths=objects_paths)
104+
print("Starting list...")
95105
objects_paths = session.s3.list_objects(path=path)
96106
assert len(objects_paths) == 0
97107

@@ -104,26 +114,36 @@ def check_list_with_retry(session, path, length):
104114
return False
105115

106116

107-
@pytest.mark.parametrize("objects_num", [1, 10, 1001])
117+
@pytest.mark.parametrize("objects_num", [1, 10, 1001, 2001, 3001])
108118
def test_delete_not_listed_objects(session, bucket, objects_num):
109119
path = f"s3://{bucket}/objs-not-listed-{objects_num}/"
120+
print("Starting deletes...")
121+
session.s3.delete_objects(path=path)
122+
print("Starting writes...")
110123
write_fake_objects(bucket, f"objs-not-listed-{objects_num}/", objects_num)
124+
print("Starting not listed deletes...")
111125
session.s3.delete_not_listed_objects(objects_paths=[f"{path}0"])
126+
print("Starting checks...")
112127
assert check_list_with_retry(session=session, path=path, length=1)
128+
print("Starting deletes...")
113129
session.s3.delete_objects(path=path)
114130

115131

116-
@pytest.mark.parametrize("objects_num", [1, 10, 1001])
132+
@pytest.mark.parametrize("objects_num", [1, 10, 1001, 2001, 3001])
117133
def test_get_objects_sizes(session, bucket, objects_num):
118134
path = f"s3://{bucket}/objs-get-objects-sizes-{objects_num}/"
135+
print("Starting deletes...")
119136
session.s3.delete_objects(path=path)
137+
print("Starting writes...")
120138
write_fake_objects(bucket, f"objs-get-objects-sizes-{objects_num}/",
121139
objects_num)
122140
objects_paths = [
123141
f"s3://{bucket}/objs-get-objects-sizes-{objects_num}/{i}"
124142
for i in range(objects_num)
125143
]
144+
print("Starting gets...")
126145
objects_sizes = session.s3.get_objects_sizes(objects_paths=objects_paths)
146+
print("Starting deletes...")
127147
session.s3.delete_objects(path=path)
128148
for _, object_size in objects_sizes.items():
129149
assert object_size == 10

testing/test_awswrangler/test_session.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,9 @@
55

66
from awswrangler import Session
77

8-
logging.basicConfig(level=logging.INFO)
8+
logging.basicConfig(
9+
level=logging.INFO,
10+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
911
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
1012

1113

@@ -24,6 +26,10 @@ def test_session(default_session):
2426
assert_account_id(default_session)
2527

2628

29+
def test_session_region():
30+
assert_account_id(Session(region_name="us-east-1"))
31+
32+
2733
def test_from_boto3_session(default_session):
2834
assert_account_id(Session(boto3_session=default_session.boto3_session))
2935

testing/test_awswrangler/test_spark.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,9 @@
66

77
from awswrangler import Session
88

9-
logging.basicConfig(level=logging.INFO)
9+
logging.basicConfig(
10+
level=logging.INFO,
11+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
1012
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
1113

1214

testing/test_awswrangler/test_utils.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,9 @@
44

55
import awswrangler
66

7-
logging.basicConfig(level=logging.INFO)
7+
logging.basicConfig(
8+
level=logging.INFO,
9+
format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
810
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
911

1012

0 commit comments

Comments
 (0)